Delete unused code. (#218)

This commit is contained in:
Tingluo Huang
2019-12-16 17:05:26 -05:00
committed by GitHub
parent c3c66bb14a
commit d0a4a41a63
582 changed files with 155 additions and 66274 deletions

View File

@@ -1,769 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Runtime.Serialization;
using System.Text.RegularExpressions;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
[EditorBrowsable(EditorBrowsableState.Never)]
public static class AgentJobRequestMessageUtil
{
// Legacy JobRequestMessage -> Pipeline JobRequestMessage
// Used by the agent when the latest-version agent connects to an old-version TFS.
// Used by the server when a common method only takes the new message contract (e.g. telemetry logging).
public static AgentJobRequestMessage Convert(WebApi.AgentJobRequestMessage message)
{
    // construct steps: every legacy task becomes a TaskStep in the new contract
    List<JobStep> jobSteps = new List<JobStep>();
    foreach (var task in message.Tasks)
    {
        TaskStep taskStep = new TaskStep(task);
        jobSteps.Add(taskStep);
    }
    Dictionary<String, VariableValue> variables = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);
    HashSet<MaskHint> maskHints = new HashSet<MaskHint>();
    JobResources jobResources = new JobResources();
    WorkspaceOptions workspace = new WorkspaceOptions();
    // Pull variables, mask hints and resources out of the legacy environment.
    message.Environment.Extract(variables, maskHints, jobResources);
    // convert repository endpoint into checkout task for Build
    if (string.Equals(message.Plan.PlanType, "Build", StringComparison.OrdinalIgnoreCase))
    {
        // repositoryId was added sometime after TFS2015, so we need to fall back to find endpoint using endpoint type.
        var legacyRepoEndpoint = jobResources.Endpoints.FirstOrDefault(x => x.Data.ContainsKey("repositoryId"));
        if (legacyRepoEndpoint == null)
        {
            legacyRepoEndpoint = jobResources.Endpoints.FirstOrDefault(x => x.Type == LegacyRepositoryTypes.Bitbucket || x.Type == LegacyRepositoryTypes.Git || x.Type == LegacyRepositoryTypes.TfsGit || x.Type == LegacyRepositoryTypes.GitHub || x.Type == LegacyRepositoryTypes.GitHubEnterprise || x.Type == LegacyRepositoryTypes.TfsVersionControl);
        }
        // build retention job will not have a repo endpoint.
        if (legacyRepoEndpoint != null)
        {
            // construct checkout task that stands in for the legacy "get sources" behavior
            var checkoutStep = new TaskStep();
            checkoutStep.Id = Guid.NewGuid();
            checkoutStep.DisplayName = PipelineConstants.CheckoutTask.FriendlyName;
            checkoutStep.Name = "__system_checkout";
            checkoutStep.Reference = new TaskStepDefinitionReference()
            {
                Id = PipelineConstants.CheckoutTask.Id,
                Name = PipelineConstants.CheckoutTask.Name,
                Version = PipelineConstants.CheckoutTask.Version,
            };
            checkoutStep.Inputs[PipelineConstants.CheckoutTaskInputs.Repository] = "__legacy_repo_endpoint";
            // construct self repository resource mirroring the legacy endpoint
            var defaultRepo = new RepositoryResource();
            defaultRepo.Alias = "__legacy_repo_endpoint";
            defaultRepo.Properties.Set<String>(RepositoryPropertyNames.Name, legacyRepoEndpoint.Name);
            legacyRepoEndpoint.Data.TryGetValue("repositoryId", out string repositoryId);
            if (!string.IsNullOrEmpty(repositoryId))
            {
                defaultRepo.Id = repositoryId;
            }
            else
            {
                // old endpoints without a repositoryId fall back to the synthetic alias
                defaultRepo.Id = "__legacy_repo_endpoint";
            }
            defaultRepo.Endpoint = new ServiceEndpointReference()
            {
                Id = Guid.Empty,
                Name = legacyRepoEndpoint.Name
            };
            defaultRepo.Type = ConvertLegacySourceType(legacyRepoEndpoint.Type);
            defaultRepo.Url = legacyRepoEndpoint.Url;
            // Version/branch/author/message come from well-known build variables when present.
            if (variables.TryGetValue("build.sourceVersion", out VariableValue sourceVersion) && !string.IsNullOrEmpty(sourceVersion?.Value))
            {
                defaultRepo.Version = sourceVersion.Value;
            }
            if (variables.TryGetValue("build.sourceBranch", out VariableValue sourceBranch) && !string.IsNullOrEmpty(sourceBranch?.Value))
            {
                defaultRepo.Properties.Set<string>(RepositoryPropertyNames.Ref, sourceBranch.Value);
            }
            VersionInfo versionInfo = null;
            if (variables.TryGetValue("build.sourceVersionAuthor", out VariableValue sourceAuthor) && !string.IsNullOrEmpty(sourceAuthor?.Value))
            {
                versionInfo = new VersionInfo();
                versionInfo.Author = sourceAuthor.Value;
            }
            if (variables.TryGetValue("build.sourceVersionMessage", out VariableValue sourceMessage) && !string.IsNullOrEmpty(sourceMessage?.Value))
            {
                if (versionInfo == null)
                {
                    versionInfo = new VersionInfo();
                }
                versionInfo.Message = sourceMessage.Value;
            }
            if (versionInfo != null)
            {
                defaultRepo.Properties.Set<VersionInfo>(RepositoryPropertyNames.VersionInfo, versionInfo);
            }
            if (defaultRepo.Type == RepositoryTypes.Tfvc)
            {
                // TFVC: carry over shelveset and workspace mappings from endpoint data
                if (variables.TryGetValue("build.sourceTfvcShelveset", out VariableValue shelveset) && !string.IsNullOrEmpty(shelveset?.Value))
                {
                    defaultRepo.Properties.Set<string>(RepositoryPropertyNames.Shelveset, shelveset.Value);
                }
                var legacyTfvcMappingJson = legacyRepoEndpoint.Data["tfvcWorkspaceMapping"];
                var legacyTfvcMapping = JsonUtility.FromString<LegacyBuildWorkspace>(legacyTfvcMappingJson);
                if (legacyTfvcMapping != null)
                {
                    IList<WorkspaceMapping> tfvcMapping = new List<WorkspaceMapping>();
                    foreach (var mapping in legacyTfvcMapping.Mappings)
                    {
                        // a "cloak" mapping type becomes an Exclude mapping
                        tfvcMapping.Add(new WorkspaceMapping() { ServerPath = mapping.ServerPath, LocalPath = mapping.LocalPath, Exclude = String.Equals(mapping.MappingType, "cloak", StringComparison.OrdinalIgnoreCase) });
                    }
                    defaultRepo.Properties.Set<IList<WorkspaceMapping>>(RepositoryPropertyNames.Mappings, tfvcMapping);
                }
            }
            else if (defaultRepo.Type == RepositoryTypes.Svn)
            {
                // SVN: carry over workspace mappings from endpoint data
                var legacySvnMappingJson = legacyRepoEndpoint.Data["svnWorkspaceMapping"];
                var legacySvnMapping = JsonUtility.FromString<LegacySvnWorkspace>(legacySvnMappingJson);
                if (legacySvnMapping != null)
                {
                    IList<WorkspaceMapping> svnMapping = new List<WorkspaceMapping>();
                    foreach (var mapping in legacySvnMapping.Mappings)
                    {
                        svnMapping.Add(new WorkspaceMapping() { ServerPath = mapping.ServerPath, LocalPath = mapping.LocalPath, Depth = mapping.Depth, IgnoreExternals = mapping.IgnoreExternals, Revision = mapping.Revision });
                    }
                    defaultRepo.Properties.Set<IList<WorkspaceMapping>>(RepositoryPropertyNames.Mappings, svnMapping);
                }
            }
            legacyRepoEndpoint.Data.TryGetValue("clean", out string cleanString);
            if (!string.IsNullOrEmpty(cleanString))
            {
                checkoutStep.Inputs[PipelineConstants.CheckoutTaskInputs.Clean] = cleanString;
            }
            else
            {
                // Checkout task has clean set to false as default.
                checkoutStep.Inputs[PipelineConstants.CheckoutTaskInputs.Clean] = Boolean.FalseString;
            }
            if (legacyRepoEndpoint.Data.TryGetValue("checkoutSubmodules", out string checkoutSubmodulesString) &&
                Boolean.TryParse(checkoutSubmodulesString, out Boolean checkoutSubmodules) &&
                checkoutSubmodules)
            {
                if (legacyRepoEndpoint.Data.TryGetValue("checkoutNestedSubmodules", out string nestedSubmodulesString) &&
                    Boolean.TryParse(nestedSubmodulesString, out Boolean nestedSubmodules) &&
                    nestedSubmodules)
                {
                    checkoutStep.Inputs[PipelineConstants.CheckoutTaskInputs.Submodules] = PipelineConstants.CheckoutTaskInputs.SubmodulesOptions.Recursive;
                }
                else
                {
                    checkoutStep.Inputs[PipelineConstants.CheckoutTaskInputs.Submodules] = PipelineConstants.CheckoutTaskInputs.SubmodulesOptions.True;
                }
            }
            if (legacyRepoEndpoint.Data.ContainsKey("fetchDepth"))
            {
                checkoutStep.Inputs[PipelineConstants.CheckoutTaskInputs.FetchDepth] = legacyRepoEndpoint.Data["fetchDepth"];
            }
            if (legacyRepoEndpoint.Data.ContainsKey("gitLfsSupport"))
            {
                checkoutStep.Inputs[PipelineConstants.CheckoutTaskInputs.Lfs] = legacyRepoEndpoint.Data["gitLfsSupport"];
            }
            if (VariableUtility.GetEnableAccessTokenType(variables) == EnableAccessTokenType.Variable)
            {
                checkoutStep.Inputs[PipelineConstants.CheckoutTaskInputs.PersistCredentials] = Boolean.TrueString;
            }
            // construct workspace option from the legacy numeric cleanOptions value
            if (Boolean.TryParse(cleanString, out Boolean clean) && clean)
            {
                if (legacyRepoEndpoint.Data.TryGetValue("cleanOptions", out string cleanOptionsString) && !string.IsNullOrEmpty(cleanOptionsString))
                {
                    if (string.Equals(cleanOptionsString, "1", StringComparison.OrdinalIgnoreCase)) //RepositoryCleanOptions.SourceAndOutputDir
                    {
                        workspace.Clean = PipelineConstants.WorkspaceCleanOptions.Outputs;
                    }
                    else if (string.Equals(cleanOptionsString, "2", StringComparison.OrdinalIgnoreCase)) //RepositoryCleanOptions.SourceDir
                    {
                        workspace.Clean = PipelineConstants.WorkspaceCleanOptions.Resources;
                    }
                    else if (string.Equals(cleanOptionsString, "3", StringComparison.OrdinalIgnoreCase)) //RepositoryCleanOptions.AllBuildDir
                    {
                        workspace.Clean = PipelineConstants.WorkspaceCleanOptions.All;
                    }
                }
            }
            // add checkout task when build.syncSources and skipSyncSource are not set;
            // otherwise keep the step but disable it via a "false" condition
            variables.TryGetValue("build.syncSources", out VariableValue syncSourcesVariable);
            legacyRepoEndpoint.Data.TryGetValue("skipSyncSource", out string skipSyncSource);
            if (!string.IsNullOrEmpty(syncSourcesVariable?.Value) && Boolean.TryParse(syncSourcesVariable?.Value, out bool syncSource) && !syncSource)
            {
                checkoutStep.Condition = bool.FalseString;
            }
            else if (Boolean.TryParse(skipSyncSource, out bool skipSource) && skipSource)
            {
                checkoutStep.Condition = bool.FalseString;
            }
            // checkout always runs first
            jobSteps.Insert(0, checkoutStep);
            // always add self repository to job resource
            jobResources.Repositories.Add(defaultRepo);
        }
    }
    AgentJobRequestMessage agentRequestMessage = new AgentJobRequestMessage(message.Plan, message.Timeline, message.JobId, message.JobName, message.JobRefName, null, null, null, variables, maskHints.ToList(), jobResources, null, workspace, jobSteps, null)
    {
        RequestId = message.RequestId
    };
    return agentRequestMessage;
}
// Pipeline JobRequestMessage -> Legacy JobRequestMessage
// Used by the server when the connected agent is an old version and doesn't support the new contract yet.
public static WebApi.AgentJobRequestMessage Convert(AgentJobRequestMessage message)
{
    // Old agent can't handle container(s)
    if (message.JobContainer != null)
    {
        throw new NotSupportedException("Job containers are not supported");
    }
    if (message.JobServiceContainers != null)
    {
        throw new NotSupportedException("Job service containers are not supported");
    }
    // Old agent can't handle more than 1 repository
    if (message.Resources.Repositories.Count > 1)
    {
        throw new NotSupportedException(string.Join(", ", message.Resources.Repositories.Select(x => x.Alias)));
    }
    // Old agent can't handle more than 1 checkout task
    if (message.Steps.Where(x => x.IsCheckoutTask()).Count() > 1)
    {
        throw new NotSupportedException(PipelineConstants.CheckoutTask.Id.ToString("D"));
    }
    // construct tasks (checkout steps are folded into the repo endpoint below)
    List<TaskInstance> tasks = new List<TaskInstance>();
    foreach (var step in message.Steps)
    {
        // Pipeline builder should add min agent demand when steps contain a group
        if (step.Type != StepType.Task)
        {
            throw new NotSupportedException(step.Type.ToString());
        }
        // don't add checkout task, we need to convert the checkout task into endpoint
        if (!step.IsCheckoutTask())
        {
            TaskInstance task = (step as TaskStep).ToLegacyTaskInstance();
            tasks.Add(task);
        }
    }
    if (message.Resources != null)
    {
        foreach (var endpoint in message.Resources.Endpoints)
        {
            // Legacy message requires every endpoint's name to equal the endpoint's id.
            // Guid.Empty is for repository endpoints.
            if (!String.Equals(endpoint.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase) &&
                endpoint.Id != Guid.Empty)
            {
                endpoint.Name = endpoint.Id.ToString("D");
            }
        }
        // Make sure we propagate download ticket into the mask hints
        foreach (var secureFile in message.Resources.SecureFiles)
        {
            if (!String.IsNullOrEmpty(secureFile.Ticket))
            {
                message.MaskHints.Add(new MaskHint() { Type = MaskType.Regex, Value = Regex.Escape(secureFile.Ticket) });
            }
        }
    }
    if (String.Equals(message.Plan.PlanType, "Build", StringComparison.OrdinalIgnoreCase))
    {
        // create repository endpoint based on checkout task + repository resource + repository endpoint
        // repoResource might be null when environment version is still on 1
        var repoResource = message.Resources?.Repositories.SingleOrDefault();
        if (repoResource != null)
        {
            var legacyRepoEndpoint = new ServiceEndpoint();
            legacyRepoEndpoint.Name = repoResource.Properties.Get<string>(RepositoryPropertyNames.Name);
            legacyRepoEndpoint.Type = ConvertToLegacySourceType(repoResource.Type);
            legacyRepoEndpoint.Url = repoResource.Url;
            if (repoResource.Endpoint != null)
            {
                // Match by id when the reference carries one; otherwise match by name among Guid.Empty endpoints.
                var referencedEndpoint = message.Resources.Endpoints.First(x => (x.Id == repoResource.Endpoint.Id && x.Id != Guid.Empty) || (String.Equals(x.Name, repoResource.Endpoint.Name?.Literal, StringComparison.OrdinalIgnoreCase) && x.Id == Guid.Empty && repoResource.Endpoint.Id == Guid.Empty));
                var endpointAuthCopy = referencedEndpoint.Authorization?.Clone();
                if (endpointAuthCopy != null)
                {
                    // Convert token-style auth schemes into the UsernamePassword shape old agents expect.
                    if (endpointAuthCopy.Scheme == EndpointAuthorizationSchemes.Token) //InstallationToken (Tabby) or ApiToken (GithubEnterprise)
                    {
                        if (referencedEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out string accessToken)) //Tabby
                        {
                            legacyRepoEndpoint.Authorization = new EndpointAuthorization()
                            {
                                Scheme = EndpointAuthorizationSchemes.UsernamePassword,
                                Parameters =
                                {
                                    { EndpointAuthorizationParameters.Username, "x-access-token" },
                                    { EndpointAuthorizationParameters.Password, accessToken }
                                }
                            };
                        }
                        else if (referencedEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.ApiToken, out string apiToken)) //GithubEnterprise
                        {
                            legacyRepoEndpoint.Authorization = new EndpointAuthorization()
                            {
                                Scheme = EndpointAuthorizationSchemes.UsernamePassword,
                                Parameters =
                                {
                                    { EndpointAuthorizationParameters.Username, apiToken },
                                    { EndpointAuthorizationParameters.Password, "x-oauth-basic" }
                                }
                            };
                        }
                    }
                    else if (endpointAuthCopy.Scheme == EndpointAuthorizationSchemes.PersonalAccessToken) // Github
                    {
                        if (referencedEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out string accessToken)) //Tabby
                        {
                            legacyRepoEndpoint.Authorization = new EndpointAuthorization()
                            {
                                Scheme = EndpointAuthorizationSchemes.UsernamePassword,
                                Parameters =
                                {
                                    { EndpointAuthorizationParameters.Username, "pat" },
                                    { EndpointAuthorizationParameters.Password, accessToken }
                                }
                            };
                        }
                    }
                    else
                    {
                        // any other scheme is passed through unchanged
                        legacyRepoEndpoint.Authorization = endpointAuthCopy;
                    }
                }
                // there are 2 properties we put into the legacy repo endpoint directly from the connection endpoint
                if (referencedEndpoint.Data.TryGetValue("acceptUntrustedCerts", out String acceptUntrustedCerts))
                {
                    legacyRepoEndpoint.Data["acceptUntrustedCerts"] = acceptUntrustedCerts;
                }
                if (referencedEndpoint.Data.TryGetValue("realmName", out String realmName))
                {
                    legacyRepoEndpoint.Data["realmName"] = realmName;
                }
            }
            legacyRepoEndpoint.Data["repositoryId"] = repoResource.Id;
            // default values in the old message format (overridden from checkout-task inputs below)
            legacyRepoEndpoint.Data["clean"] = Boolean.FalseString;
            legacyRepoEndpoint.Data["checkoutSubmodules"] = Boolean.FalseString;
            legacyRepoEndpoint.Data["checkoutNestedSubmodules"] = Boolean.FalseString;
            legacyRepoEndpoint.Data["fetchDepth"] = "0";
            legacyRepoEndpoint.Data["gitLfsSupport"] = Boolean.FalseString;
            legacyRepoEndpoint.Data["skipSyncSource"] = Boolean.FalseString;
            legacyRepoEndpoint.Data["cleanOptions"] = "0";
            legacyRepoEndpoint.Data["rootFolder"] = null; // old tfvc repo endpoint has this set to $/foo, but it doesn't seem to be used at all.
            if (repoResource.Type == RepositoryTypes.Tfvc)
            {
                // TFVC: serialize workspace mappings back into the legacy JSON shape
                var tfvcMapping = repoResource.Properties.Get<IList<WorkspaceMapping>>(RepositoryPropertyNames.Mappings);
                if (tfvcMapping != null)
                {
                    LegacyBuildWorkspace legacyMapping = new LegacyBuildWorkspace();
                    foreach (var mapping in tfvcMapping)
                    {
                        legacyMapping.Mappings.Add(new LegacyMappingDetails() { ServerPath = mapping.ServerPath, LocalPath = mapping.LocalPath, MappingType = mapping.Exclude ? "cloak" : "map" });
                    }
                    legacyRepoEndpoint.Data["tfvcWorkspaceMapping"] = JsonUtility.ToString(legacyMapping);
                }
            }
            else if (repoResource.Type == RepositoryTypes.Svn)
            {
                // SVN: serialize workspace mappings back into the legacy JSON shape
                var svnMapping = repoResource.Properties.Get<IList<WorkspaceMapping>>(RepositoryPropertyNames.Mappings);
                if (svnMapping != null)
                {
                    LegacySvnWorkspace legacyMapping = new LegacySvnWorkspace();
                    foreach (var mapping in svnMapping)
                    {
                        legacyMapping.Mappings.Add(new LegacySvnMappingDetails() { ServerPath = mapping.ServerPath, LocalPath = mapping.LocalPath, Depth = mapping.Depth, IgnoreExternals = mapping.IgnoreExternals, Revision = mapping.Revision });
                    }
                    legacyRepoEndpoint.Data["svnWorkspaceMapping"] = JsonUtility.ToString(legacyMapping);
                }
            }
            else if (repoResource.Type == RepositoryTypes.Git)
            {
                if (message.Variables.TryGetValue(WellKnownDistributedTaskVariables.ServerType, out VariableValue serverType) && String.Equals(serverType?.Value, "Hosted", StringComparison.OrdinalIgnoreCase))
                {
                    legacyRepoEndpoint.Data["onpremtfsgit"] = Boolean.FalseString;
                }
                else
                {
                    legacyRepoEndpoint.Data["onpremtfsgit"] = Boolean.TrueString;
                }
            }
            // Backfill well-known build.* variables from the repository resource when missing or empty.
            if (!message.Variables.ContainsKey("build.repository.id") || String.IsNullOrEmpty(message.Variables["build.repository.id"]?.Value))
            {
                message.Variables["build.repository.id"] = repoResource.Id;
            }
            if (!message.Variables.ContainsKey("build.repository.name") || String.IsNullOrEmpty(message.Variables["build.repository.name"]?.Value))
            {
                message.Variables["build.repository.name"] = repoResource.Properties.Get<String>(RepositoryPropertyNames.Name);
            }
            if (!message.Variables.ContainsKey("build.repository.uri") || String.IsNullOrEmpty(message.Variables["build.repository.uri"]?.Value))
            {
                message.Variables["build.repository.uri"] = repoResource.Url.AbsoluteUri;
            }
            var versionInfo = repoResource.Properties.Get<VersionInfo>(RepositoryPropertyNames.VersionInfo);
            if (!message.Variables.ContainsKey("build.sourceVersionAuthor") || String.IsNullOrEmpty(message.Variables["build.sourceVersionAuthor"]?.Value))
            {
                message.Variables["build.sourceVersionAuthor"] = versionInfo?.Author;
            }
            if (!message.Variables.ContainsKey("build.sourceVersionMessage") || String.IsNullOrEmpty(message.Variables["build.sourceVersionMessage"]?.Value))
            {
                message.Variables["build.sourceVersionMessage"] = versionInfo?.Message;
            }
            if (!message.Variables.ContainsKey("build.sourceVersion") || String.IsNullOrEmpty(message.Variables["build.sourceVersion"]?.Value))
            {
                message.Variables["build.sourceVersion"] = repoResource.Version;
            }
            if (!message.Variables.ContainsKey("build.sourceBranch") || String.IsNullOrEmpty(message.Variables["build.sourceBranch"]?.Value))
            {
                message.Variables["build.sourceBranch"] = repoResource.Properties.Get<String>(RepositoryPropertyNames.Ref);
            }
            if (repoResource.Type == RepositoryTypes.Tfvc)
            {
                var shelveset = repoResource.Properties.Get<String>(RepositoryPropertyNames.Shelveset);
                if (!String.IsNullOrEmpty(shelveset) && (!message.Variables.ContainsKey("build.sourceTfvcShelveset") || String.IsNullOrEmpty(message.Variables["build.sourceTfvcShelveset"]?.Value)))
                {
                    message.Variables["build.sourceTfvcShelveset"] = shelveset;
                }
            }
            // Fold checkout-task inputs into the legacy endpoint's Data dictionary.
            TaskStep checkoutTask = message.Steps.FirstOrDefault(x => x.IsCheckoutTask()) as TaskStep;
            if (checkoutTask != null)
            {
                if (checkoutTask.Inputs.TryGetValue(PipelineConstants.CheckoutTaskInputs.Clean, out string taskInputClean) && !string.IsNullOrEmpty(taskInputClean))
                {
                    legacyRepoEndpoint.Data["clean"] = taskInputClean;
                }
                else
                {
                    legacyRepoEndpoint.Data["clean"] = Boolean.FalseString;
                }
                if (checkoutTask.Inputs.TryGetValue(PipelineConstants.CheckoutTaskInputs.Submodules, out string taskInputSubmodules) && !string.IsNullOrEmpty(taskInputSubmodules))
                {
                    legacyRepoEndpoint.Data["checkoutSubmodules"] = Boolean.TrueString;
                    if (String.Equals(taskInputSubmodules, PipelineConstants.CheckoutTaskInputs.SubmodulesOptions.Recursive, StringComparison.OrdinalIgnoreCase))
                    {
                        legacyRepoEndpoint.Data["checkoutNestedSubmodules"] = Boolean.TrueString;
                    }
                }
                if (checkoutTask.Inputs.TryGetValue(PipelineConstants.CheckoutTaskInputs.FetchDepth, out string taskInputFetchDepth) && !string.IsNullOrEmpty(taskInputFetchDepth))
                {
                    legacyRepoEndpoint.Data["fetchDepth"] = taskInputFetchDepth;
                }
                if (checkoutTask.Inputs.TryGetValue(PipelineConstants.CheckoutTaskInputs.Lfs, out string taskInputfs) && !string.IsNullOrEmpty(taskInputfs))
                {
                    legacyRepoEndpoint.Data["gitLfsSupport"] = taskInputfs;
                }
                // Skip sync sources
                if (String.Equals(checkoutTask.Inputs[PipelineConstants.CheckoutTaskInputs.Repository], PipelineConstants.NoneAlias, StringComparison.OrdinalIgnoreCase))
                {
                    legacyRepoEndpoint.Data["skipSyncSource"] = Boolean.TrueString;
                }
                else if (String.Equals(checkoutTask.Inputs[PipelineConstants.CheckoutTaskInputs.Repository], PipelineConstants.DesignerRepo, StringComparison.OrdinalIgnoreCase) && checkoutTask.Condition == Boolean.FalseString)
                {
                    legacyRepoEndpoint.Data["skipSyncSource"] = Boolean.TrueString;
                }
            }
            // workspace clean options
            legacyRepoEndpoint.Data["cleanOptions"] = "0"; // RepositoryCleanOptions.Source;
            if (message.Workspace != null)
            {
                if (String.Equals(message.Workspace.Clean, PipelineConstants.WorkspaceCleanOptions.Outputs, StringComparison.OrdinalIgnoreCase))
                {
                    legacyRepoEndpoint.Data["cleanOptions"] = "1"; // RepositoryCleanOptions.SourceAndOutputDir;
                }
                else if (String.Equals(message.Workspace.Clean, PipelineConstants.WorkspaceCleanOptions.Resources, StringComparison.OrdinalIgnoreCase))
                {
                    legacyRepoEndpoint.Data["cleanOptions"] = "2"; //RepositoryCleanOptions.SourceDir;
                }
                else if (String.Equals(message.Workspace.Clean, PipelineConstants.WorkspaceCleanOptions.All, StringComparison.OrdinalIgnoreCase))
                {
                    legacyRepoEndpoint.Data["cleanOptions"] = "3"; // RepositoryCleanOptions.AllBuildDir;
                }
            }
            // add repository endpoint to environment
            message.Resources.Endpoints.Add(legacyRepoEndpoint);
        }
    }
    JobEnvironment environment = new JobEnvironment(message.Variables, message.MaskHints, message.Resources);
    WebApi.AgentJobRequestMessage legacyAgentRequestMessage = new WebApi.AgentJobRequestMessage(message.Plan, message.Timeline, message.JobId, message.JobDisplayName, message.JobName, environment, tasks)
    {
        RequestId = message.RequestId
    };
    return legacyAgentRequestMessage;
}
/// <summary>
/// Maps a legacy (Build.WebApi) repository type name to the corresponding
/// pipeline repository type name. Note the asymmetry: legacy "Git" means an
/// external git repo and legacy "TfsGit" maps to the pipeline "Git" type.
/// </summary>
/// <exception cref="NotSupportedException">The legacy type name is not recognized.</exception>
private static string ConvertLegacySourceType(string legacySourceType)
{
    // Case-insensitive match against a known legacy type name.
    bool Matches(string candidate) => String.Equals(legacySourceType, candidate, StringComparison.OrdinalIgnoreCase);

    if (Matches(LegacyRepositoryTypes.Bitbucket))
    {
        return RepositoryTypes.Bitbucket;
    }

    if (Matches(LegacyRepositoryTypes.Git))
    {
        return RepositoryTypes.ExternalGit;
    }

    if (Matches(LegacyRepositoryTypes.TfsGit))
    {
        return RepositoryTypes.Git;
    }

    if (Matches(LegacyRepositoryTypes.GitHub))
    {
        return RepositoryTypes.GitHub;
    }

    if (Matches(LegacyRepositoryTypes.GitHubEnterprise))
    {
        return RepositoryTypes.GitHubEnterprise;
    }

    if (Matches(LegacyRepositoryTypes.Svn))
    {
        return RepositoryTypes.Svn;
    }

    if (Matches(LegacyRepositoryTypes.TfsVersionControl))
    {
        return RepositoryTypes.Tfvc;
    }

    throw new NotSupportedException(legacySourceType);
}
/// <summary>
/// Maps a pipeline repository type name back to the legacy (Build.WebApi)
/// repository type name; the inverse of <see cref="ConvertLegacySourceType"/>.
/// </summary>
/// <exception cref="NotSupportedException">The pipeline type name is not recognized.</exception>
private static string ConvertToLegacySourceType(string pipelineSourceType)
{
    // Case-insensitive match against a known pipeline type name.
    bool Matches(string candidate) => String.Equals(pipelineSourceType, candidate, StringComparison.OrdinalIgnoreCase);

    if (Matches(RepositoryTypes.Bitbucket))
    {
        return LegacyRepositoryTypes.Bitbucket;
    }

    if (Matches(RepositoryTypes.ExternalGit))
    {
        return LegacyRepositoryTypes.Git;
    }

    if (Matches(RepositoryTypes.Git))
    {
        return LegacyRepositoryTypes.TfsGit;
    }

    if (Matches(RepositoryTypes.GitHub))
    {
        return LegacyRepositoryTypes.GitHub;
    }

    if (Matches(RepositoryTypes.GitHubEnterprise))
    {
        return LegacyRepositoryTypes.GitHubEnterprise;
    }

    if (Matches(RepositoryTypes.Svn))
    {
        return LegacyRepositoryTypes.Svn;
    }

    if (Matches(RepositoryTypes.Tfvc))
    {
        return LegacyRepositoryTypes.TfsVersionControl;
    }

    throw new NotSupportedException(pipelineSourceType);
}
// Well-known legacy repository type names, duplicated here so this assembly
// does not need a reference to the Build.WebApi assembly that declares them.
private static class LegacyRepositoryTypes // Copy from Build.Webapi
{
    public const String TfsVersionControl = "TfsVersionControl";
    public const String TfsGit = "TfsGit";
    public const String Git = "Git";
    public const String GitHub = "GitHub";
    public const String GitHubEnterprise = "GitHubEnterprise";
    public const String Bitbucket = "Bitbucket";
    public const String Svn = "Svn";
}
/// <summary>
/// Represents a single entry in a legacy TFVC workspace mapping, serialized
/// to/from the "tfvcWorkspaceMapping" endpoint data JSON.
/// </summary>
[DataContract]
private class LegacyMappingDetails
{
    /// <summary>
    /// The server path of the mapping.
    /// </summary>
    [DataMember(Name = "serverPath")]
    public String ServerPath { get; set; }

    /// <summary>
    /// The mapping type ("map" or "cloak").
    /// </summary>
    [DataMember(Name = "mappingType")]
    public String MappingType { get; set; }

    /// <summary>
    /// The local path of the mapping.
    /// </summary>
    [DataMember(Name = "localPath")]
    public String LocalPath { get; set; }
}
/// <summary>
/// Represents a legacy TFVC build workspace: the container for the list of
/// workspace mapping entries serialized as "tfvcWorkspaceMapping".
/// </summary>
[DataContract]
private class LegacyBuildWorkspace
{
    [DataMember(Name = "mappings")]
    private List<LegacyMappingDetails> m_mappings;

    /// <summary>
    /// The list of workspace mapping entries; lazily created on first access
    /// so deserialized instances with no mappings still expose an empty list.
    /// </summary>
    public List<LegacyMappingDetails> Mappings => m_mappings ??= new List<LegacyMappingDetails>();
}
/// <summary>
/// Represents a single Subversion mapping entry, serialized to/from the
/// "svnWorkspaceMapping" endpoint data JSON.
/// </summary>
[DataContract]
private class LegacySvnMappingDetails
{
    /// <summary>
    /// The server path of the mapping.
    /// </summary>
    [DataMember(Name = "serverPath")]
    public String ServerPath { get; set; }

    /// <summary>
    /// The local path of the mapping.
    /// </summary>
    [DataMember(Name = "localPath")]
    public String LocalPath { get; set; }

    /// <summary>
    /// The revision to check out.
    /// </summary>
    [DataMember(Name = "revision")]
    public String Revision { get; set; }

    /// <summary>
    /// The checkout depth.
    /// </summary>
    [DataMember(Name = "depth")]
    public Int32 Depth { get; set; }

    /// <summary>
    /// Indicates whether to ignore externals.
    /// </summary>
    [DataMember(Name = "ignoreExternals")]
    public bool IgnoreExternals { get; set; }
}
/// <summary>
/// Represents a legacy Subversion workspace: the container for the list of
/// SVN mapping entries serialized as "svnWorkspaceMapping".
/// </summary>
[DataContract]
private class LegacySvnWorkspace
{
    [DataMember(Name = "mappings")]
    private List<LegacySvnMappingDetails> m_Mappings;

    /// <summary>
    /// The list of SVN mapping entries; lazily created on first access so
    /// deserialized instances with no mappings still expose an empty list.
    /// </summary>
    public List<LegacySvnMappingDetails> Mappings => m_Mappings ??= new List<LegacySvnMappingDetails>();
}
}
}

View File

@@ -1,38 +0,0 @@
using System;
using System.ComponentModel;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// A reference to an agent pool by numeric id (and/or the name carried on the
/// base <see cref="ResourceReference"/>).
/// </summary>
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class AgentPoolReference : ResourceReference
{
    public AgentPoolReference()
    {
    }

    // Copy constructor used by Clone.
    private AgentPoolReference(AgentPoolReference referenceToCopy)
        : base(referenceToCopy)
    {
        this.Id = referenceToCopy.Id;
    }

    /// <summary>
    /// The numeric identifier of the referenced agent pool.
    /// </summary>
    [DataMember(EmitDefaultValue = false)]
    public Int32 Id { get; set; }

    /// <summary>
    /// Creates a copy of this reference.
    /// </summary>
    public AgentPoolReference Clone() => new AgentPoolReference(this);

    /// <summary>
    /// Returns the base (name-based) representation, falling back to the Id.
    /// </summary>
    public override String ToString() => base.ToString() ?? this.Id.ToString();
}
}

View File

@@ -1,116 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
[EditorBrowsable(EditorBrowsableState.Never)]
public class AgentPoolStore : IAgentPoolStore
{
    /// <summary>
    /// Creates a store pre-populated with <paramref name="pools"/>; an optional
    /// <paramref name="resolver"/> is consulted for pools not already tracked.
    /// </summary>
    public AgentPoolStore(
        IList<TaskAgentPool> pools,
        IAgentPoolResolver resolver = null)
    {
        this.Resolver = resolver;
        Add(pools?.ToArray());
    }
    /// <summary>
    /// Get the queue resolver configured for this store.
    /// </summary>
    public IAgentPoolResolver Resolver
    {
        get;
    }
    // Resolves each reference via the configured resolver and caches any pool
    // that resolves successfully; references that fail to resolve are skipped.
    public void Authorize(IList<AgentPoolReference> pools)
    {
        if (pools?.Count > 0)
        {
            foreach (var pool in pools)
            {
                var authorizedResource = this.Resolver?.Resolve(pool);
                if (authorizedResource != null)
                {
                    Add(authorizedResource);
                }
            }
        }
    }
    // Returns id-only references for every pool currently tracked by id.
    public IList<AgentPoolReference> GetAuthorizedReferences()
    {
        return m_resourcesById.Values.Select(x => new AgentPoolReference { Id = x.Id }).ToList();
    }
    // Looks up a pool by id (preferred) or name; on a cache miss, falls back to
    // the resolver and caches a successful result. Returns null for an empty reference.
    public TaskAgentPool Get(AgentPoolReference reference)
    {
        if (reference == null)
        {
            return null;
        }
        var referenceId = reference.Id;
        var referenceName = reference.Name?.Literal;
        if (reference.Id == 0 && String.IsNullOrEmpty(referenceName))
        {
            return null;
        }
        TaskAgentPool authorizedResource = null;
        if (referenceId != 0)
        {
            if (m_resourcesById.TryGetValue(referenceId, out authorizedResource))
            {
                return authorizedResource;
            }
        }
        else if (!String.IsNullOrEmpty(referenceName))
        {
            if (m_resourcesByName.TryGetValue(referenceName, out authorizedResource))
            {
                return authorizedResource;
            }
        }
        // If we have an authorizer then attempt to authorize the reference for use
        authorizedResource = this.Resolver?.Resolve(reference);
        if (authorizedResource != null)
        {
            Add(authorizedResource);
        }
        return authorizedResource;
    }
    // Adds pools to both lookup tables, skipping duplicates. NOTE(review): when a
    // pool's id is new but its name collides with an existing entry, the pool is
    // registered by id only — the name keeps pointing at the first pool added.
    private void Add(params TaskAgentPool[] resources)
    {
        if (resources?.Length > 0)
        {
            foreach (var resource in resources)
            {
                // Track by ID
                if (m_resourcesById.TryGetValue(resource.Id, out _))
                {
                    continue;
                }
                m_resourcesById.Add(resource.Id, resource);
                // Track by name
                if (m_resourcesByName.TryGetValue(resource.Name, out _))
                {
                    continue;
                }
                m_resourcesByName.Add(resource.Name, resource);
            }
        }
    }
    // Lookup tables: by numeric pool id, and by case-insensitive pool name.
    private readonly Dictionary<Int32, TaskAgentPool> m_resourcesById = new Dictionary<Int32, TaskAgentPool>();
    private readonly Dictionary<String, TaskAgentPool> m_resourcesByName = new Dictionary<String, TaskAgentPool>(StringComparer.OrdinalIgnoreCase);
}
}

View File

@@ -1,169 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Runtime.Serialization;
using GitHub.DistributedTask.Pipelines.Runtime;
using GitHub.DistributedTask.Pipelines.Validation;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.Common;
using Newtonsoft.Json.Linq;
namespace GitHub.DistributedTask.Pipelines
{
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public class AgentPoolTarget : PhaseTarget
{
    public AgentPoolTarget()
        : base(PhaseTargetType.Pool)
    {
    }
    // Copy constructor used by Clone; deep-copies the pool reference and the
    // agent specification, but shares the agent-id list with the source.
    private AgentPoolTarget(AgentPoolTarget targetToClone)
        : base(targetToClone)
    {
        this.Pool = targetToClone.Pool?.Clone();
        if (targetToClone.AgentSpecification != null)
        {
            this.AgentSpecification = new JObject(targetToClone.AgentSpecification);
        }
        if (targetToClone.m_agentIds?.Count > 0)
        {
            this.m_agentIds = targetToClone.m_agentIds;
        }
    }
    /// <summary>
    /// Gets or sets the target pool from which agents will be selected.
    /// </summary>
    [DataMember(EmitDefaultValue = false)]
    public AgentPoolReference Pool
    {
        get;
        set;
    }
    // Opaque JSON blob describing the desired agent; semantics are defined by
    // the consumer of this target, not here.
    [DataMember(EmitDefaultValue = false)]
    public JObject AgentSpecification
    {
        get;
        set;
    }
    /// <summary>
    /// Gets agent Ids filter on which deployment should be done.
    /// </summary>
    public List<Int32> AgentIds
    {
        get
        {
            // lazily created; serialized via m_agentIds (nulled out when empty)
            if (m_agentIds == null)
            {
                m_agentIds = new List<Int32>();
            }
            return m_agentIds;
        }
    }
    public override PhaseTarget Clone()
    {
        return new AgentPoolTarget(this);
    }
    // A task is valid for this target only if it declares it runs on an agent.
    public override Boolean IsValid(TaskDefinition task)
    {
        ArgumentUtility.CheckForNull(task, nameof(task));
        return task.RunsOn.Contains(TaskRunsOnConstants.RunsOnAgent, StringComparer.OrdinalIgnoreCase);
    }
    // Validates that the pool reference resolves against the resource store,
    // recording errors and unauthorized references on the validation result.
    internal override void Validate(
        IPipelineContext context,
        BuildOptions buildOptions,
        ValidationResult result,
        IList<Step> steps,
        ISet<Demand> taskDemands)
    {
        // validate pool
        Int32 poolId = 0;
        String poolName = null;
        var pool = this.Pool;
        if (pool != null)
        {
            poolId = pool.Id;
            poolName = pool.Name?.GetValue(context)?.Value;
        }
        if (poolId == 0 && String.IsNullOrEmpty(poolName) && buildOptions.ValidateResources)
        {
            result.Errors.Add(new PipelineValidationError(PipelineStrings.QueueNotDefined()));
        }
        else
        {
            // we have a valid queue. record the reference
            result.AddPoolReference(poolId, poolName);
            // Attempt to resolve the queue using any identifier specified. We will look up by either ID
            // or name and the ID is preferred since it is immutable and more specific.
            if (buildOptions.ValidateResources)
            {
                TaskAgentPool taskAgentPool = null;
                var resourceStore = context.ResourceStore;
                if (resourceStore != null)
                {
                    if (poolId != 0)
                    {
                        taskAgentPool = resourceStore.GetPool(poolId);
                        if (taskAgentPool == null)
                        {
                            result.UnauthorizedResources.Pools.Add(new AgentPoolReference { Id = poolId });
                            result.Errors.Add(new PipelineValidationError(PipelineStrings.QueueNotFound(poolId)));
                        }
                    }
                    else if (!String.IsNullOrEmpty(poolName))
                    {
                        taskAgentPool = resourceStore.GetPool(poolName);
                        if (taskAgentPool == null)
                        {
                            result.UnauthorizedResources.Pools.Add(new AgentPoolReference { Name = poolName });
                            result.Errors.Add(new PipelineValidationError(PipelineStrings.QueueNotFound(poolName)));
                        }
                    }
                }
                // Store the resolved values inline to the resolved resource for this validation run
                if (taskAgentPool != null)
                {
                    this.Pool.Id = taskAgentPool.Id;
                    this.Pool.Name = taskAgentPool.Name;
                }
            }
        }
    }
    // Job expansion is not implemented for pool targets; callers must not
    // attempt to create job contexts from this target type.
    internal override JobExecutionContext CreateJobContext(PhaseExecutionContext context, string jobName, int attempt, bool continueOnError, int timeoutInMinutes, int cancelTimeoutInMinutes, IJobFactory jobFactory)
    {
        throw new NotSupportedException(nameof(AgentPoolTarget));
    }
    internal override ExpandPhaseResult Expand(PhaseExecutionContext context, bool continueOnError, int timeoutInMinutes, int cancelTimeoutInMinutes, IJobFactory jobFactory, JobExpansionOptions options)
    {
        throw new NotSupportedException(nameof(AgentPoolTarget));
    }
    // Suppress serializing an empty agent-id list so the wire format stays compact.
    [OnSerializing]
    private void OnSerializing(StreamingContext context)
    {
        if (m_agentIds?.Count == 0)
        {
            m_agentIds = null;
        }
    }
    [DataMember(Name = "AgentIds", EmitDefaultValue = false)]
    private List<Int32> m_agentIds;
}
}

View File

@@ -1,38 +0,0 @@
using System;
using System.ComponentModel;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.Pipelines
{
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class AgentQueueReference : ResourceReference
{
    /// <summary>
    /// Initializes an empty agent queue reference.
    /// </summary>
    public AgentQueueReference()
    {
    }

    /// <summary>
    /// Copy constructor backing <see cref="Clone"/>.
    /// </summary>
    private AgentQueueReference(AgentQueueReference source)
        : base(source)
    {
        Id = source.Id;
    }

    /// <summary>
    /// Gets or sets the identifier of the referenced agent queue.
    /// </summary>
    [DataMember(EmitDefaultValue = false)]
    public Int32 Id
    {
        get;
        set;
    }

    /// <summary>
    /// Creates a copy of this reference.
    /// </summary>
    public AgentQueueReference Clone() => new AgentQueueReference(this);

    /// <summary>
    /// Falls back to the numeric id when the base reference has no textual form.
    /// </summary>
    public override String ToString() => base.ToString() ?? Id.ToString();
}
}

View File

@@ -1,161 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
[EditorBrowsable(EditorBrowsableState.Never)]
/// <summary>
/// In-memory store of authorized agent queues, indexed both by id and by
/// case-insensitive name, with optional lazy resolution of unknown references.
/// </summary>
public class AgentQueueStore : IAgentQueueStore
{
    /// <summary>
    /// Creates a store seeded with <paramref name="queues"/>.
    /// </summary>
    /// <param name="queues">Initially authorized queues; may be null.</param>
    /// <param name="resolver">Optional resolver used to authorize unknown references on demand.</param>
    public AgentQueueStore(
        IList<TaskAgentQueue> queues,
        IAgentQueueResolver resolver = null)
    {
        this.Resolver = resolver;
        Add(queues?.ToArray());
    }

    /// <summary>
    /// Get the queue resolver configured for this store.
    /// </summary>
    public IAgentQueueResolver Resolver
    {
        get;
    }

    /// <summary>
    /// Marks the supplied queues as authorized, adding them to the indexes.
    /// </summary>
    public void Authorize(IList<TaskAgentQueue> queues)
    {
        if (queues?.Count > 0)
        {
            foreach (var queue in queues)
            {
                Add(queue);
            }
        }
    }

    /// <summary>
    /// Returns references (by id) for every queue currently authorized in this store.
    /// </summary>
    public IList<AgentQueueReference> GetAuthorizedReferences()
    {
        return m_resourcesById.Values.Select(x => new AgentQueueReference { Id = x.Id }).ToList();
    }

    /// <summary>
    /// Resolves a reference to an authorized queue. Lookup by id takes precedence
    /// over lookup by name; unknown references are passed to <see cref="Resolver"/>
    /// (when configured) and cached on success. Returns null when unresolvable.
    /// </summary>
    /// <exception cref="AmbiguousResourceSpecificationException">
    /// Thrown when a name matches more than one authorized queue.
    /// </exception>
    public TaskAgentQueue Get(AgentQueueReference reference)
    {
        if (reference == null)
        {
            return null;
        }

        var referenceId = reference.Id;
        var referenceName = reference.Name?.Literal;
        if (reference.Id == 0 && String.IsNullOrEmpty(referenceName))
        {
            return null;
        }

        TaskAgentQueue authorizedResource = null;
        if (referenceId != 0)
        {
            if (m_resourcesById.TryGetValue(referenceId, out authorizedResource))
            {
                return authorizedResource;
            }
        }
        else if (!String.IsNullOrEmpty(referenceName))
        {
            if (m_resourcesByName.TryGetValue(referenceName, out List<TaskAgentQueue> matchingResources))
            {
                if (matchingResources.Count > 1)
                {
                    // Fix: report the ambiguous *name*. referenceId is always 0 on
                    // this branch, so the previous message argument was meaningless.
                    throw new AmbiguousResourceSpecificationException(PipelineStrings.AmbiguousServiceEndpointSpecification(referenceName));
                }
                return matchingResources[0];
            }
        }

        // If we have an authorizer then attempt to authorize the reference for use
        authorizedResource = this.Resolver?.Resolve(reference);
        if (authorizedResource != null)
        {
            Add(authorizedResource);
        }
        return authorizedResource;
    }

    /// <summary>
    /// Adds queues to the id and name indexes, skipping ids already present.
    /// Hosted queues additionally register under known alternate names.
    /// </summary>
    private void Add(params TaskAgentQueue[] resources)
    {
        if (resources?.Length > 0)
        {
            foreach (var resource in resources)
            {
                // Track by ID; first registration for an id wins.
                if (m_resourcesById.ContainsKey(resource.Id))
                {
                    continue;
                }
                m_resourcesById.Add(resource.Id, resource);

                // not all references have names
                var name = resource.Name;
                if (string.IsNullOrWhiteSpace(name))
                {
                    continue;
                }

                // Track by name
                if (!m_resourcesByName.TryGetValue(name, out var list))
                {
                    list = new List<TaskAgentQueue>();
                    m_resourcesByName.Add(name, list);
                }

                // Clobber previously added alternate name, with the real hosted queue.
                // For example, during the "Hosted macOS High Sierra" transition, until the real queue
                // existed, it was treated as an alternate name for the "Hosted macOS" queue. After the
                // real "Hosted macOS High Sierra" queue was created, it took priority.
                if (list.Count > 0 && list[0].Pool?.IsHosted == true && resource.Pool?.IsHosted == true)
                {
                    list[0] = resource;
                }
                // Otherwise add the queue
                else
                {
                    list.Add(resource);
                }

                // Track by alternate name for specific hosted pools.
                // For example, "Hosted macOS Preview" and "Hosted macOS" are equivalent.
                if (resource.Pool?.IsHosted == true && s_alternateNames.TryGetValue(name, out var alternateNames))
                {
                    foreach (var alternateName in alternateNames)
                    {
                        if (!m_resourcesByName.TryGetValue(alternateName, out list))
                        {
                            list = new List<TaskAgentQueue>();
                            m_resourcesByName.Add(alternateName, list);
                        }

                        // Only register under the alternate name if no real hosted
                        // queue already owns it.
                        if (list.Count == 0 || list[0].Pool?.IsHosted != true)
                        {
                            list.Add(resource);
                        }
                    }
                }
            }
        }
    }

    // Bidirectional alternate-name map for hosted macOS queues.
    private static readonly Dictionary<String, String[]> s_alternateNames = new Dictionary<String, String[]>(StringComparer.OrdinalIgnoreCase)
    {
        { "Hosted macOS", new[] { "Hosted macOS Preview" } },
        { "Hosted macOS Preview", new[] { "Hosted macOS" } },
    };

    private readonly Dictionary<Int32, TaskAgentQueue> m_resourcesById = new Dictionary<Int32, TaskAgentQueue>();
    private readonly Dictionary<String, List<TaskAgentQueue>> m_resourcesByName = new Dictionary<String, List<TaskAgentQueue>>(StringComparer.OrdinalIgnoreCase);
}
}

View File

@@ -1,647 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Runtime.Serialization;
using GitHub.DistributedTask.Pipelines.Runtime;
using GitHub.DistributedTask.Pipelines.Validation;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides options for phase execution on an agent within a queue.
/// </summary>
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public class AgentQueueTarget : PhaseTarget
{
    /// <summary>
    /// Initializes a new target that executes on agents drawn from a queue.
    /// </summary>
    public AgentQueueTarget()
        : base(PhaseTargetType.Queue)
    {
    }

    /// <summary>
    /// Copy constructor used by <see cref="Clone"/>.
    /// NOTE(review): <see cref="Workspace"/> and <see cref="Container"/> are not
    /// copied here — confirm whether they are intentionally dropped on clone or
    /// handled by the base copy constructor.
    /// </summary>
    private AgentQueueTarget(AgentQueueTarget targetToClone)
        : base(targetToClone)
    {
        this.Queue = targetToClone.Queue?.Clone();
        this.Execution = targetToClone.Execution?.Clone();
        if (targetToClone.AgentSpecification != null)
        {
            // JObject's copy constructor deep-copies the specification token tree.
            this.AgentSpecification = new JObject(targetToClone.AgentSpecification);
        }
        if (targetToClone.SidecarContainers?.Count > 0)
        {
            m_sidecarContainers = new Dictionary<String, ExpressionValue<String>>(targetToClone.SidecarContainers, StringComparer.OrdinalIgnoreCase);
        }
    }

    /// <summary>
    /// Gets or sets the target queue from which agents will be selected.
    /// </summary>
    [DataMember(EmitDefaultValue = false)]
    [JsonConverter(typeof(QueueJsonConverter))]
    public AgentQueueReference Queue
    {
        get;
        set;
    }

    // Opaque agent requirements (e.g. vmImage); see Evaluate for the special
    // handling of the "vmImage" token.
    [DataMember(EmitDefaultValue = false)]
    public JObject AgentSpecification
    {
        get;
        set;
    }

    /// <summary>
    /// Gets or sets parallel execution options which control expansion and execution of the phase.
    /// </summary>
    [DataMember(EmitDefaultValue = false)]
    public ParallelExecutionOptions Execution
    {
        get;
        set;
    }

    /// <summary>
    /// Gets or sets workspace options which control how agent manage the workspace of the phase.
    /// </summary>
    [DataMember(EmitDefaultValue = false)]
    public WorkspaceOptions Workspace
    {
        get;
        set;
    }

    /// <summary>
    /// Gets or sets the container the phase will be run in.
    /// </summary>
    [DataMember(EmitDefaultValue = false)]
    [JsonConverter(typeof(ExpressionValueJsonConverter<String>))]
    public ExpressionValue<String> Container
    {
        get;
        set;
    }

    /// <summary>
    /// Gets the sidecar containers that will run alongside the phase.
    /// Lazily created; an empty map is serialized as null (see OnSerializing).
    /// </summary>
    public IDictionary<String, ExpressionValue<String>> SidecarContainers
    {
        get
        {
            if (m_sidecarContainers == null)
            {
                m_sidecarContainers = new Dictionary<String, ExpressionValue<String>>(StringComparer.OrdinalIgnoreCase);
            }
            return m_sidecarContainers;
        }
    }

    /// <summary>
    /// Creates a copy of this target (see copy-constructor note about Workspace/Container).
    /// </summary>
    public override PhaseTarget Clone()
    {
        return new AgentQueueTarget(this);
    }

    /// <summary>
    /// A task is valid for this target only if it declares it runs on an agent.
    /// </summary>
    public override Boolean IsValid(TaskDefinition task)
    {
        ArgumentUtility.CheckForNull(task, nameof(task));
        return task.RunsOn.Contains(TaskRunsOnConstants.RunsOnAgent, StringComparer.OrdinalIgnoreCase);
    }

    /// <summary>
    /// Creates a clone of this and attempts to resolve all expressions and macros.
    /// Returns null (after recording an error on <paramref name="result"/>) when
    /// the queue name expression cannot be expanded or a vmImage is unrecognized.
    /// </summary>
    internal AgentQueueTarget Evaluate(
        IPipelineContext context,
        ValidationResult result)
    {
        var qname = String.Empty;
        try
        {
            qname = context.ExpandVariables(this.Queue?.Name?.GetValue(context).Value);
        }
        catch (DistributedTask.Expressions.ExpressionException ee)
        {
            result.Errors.Add(new PipelineValidationError(ee.Message));
            return null;
        }

        var literalTarget = this.Clone() as AgentQueueTarget;
        var spec = this.AgentSpecification;
        if (spec != null)
        {
            // Evaluate any expressions embedded in the agent specification.
            spec = context.Evaluate(this.AgentSpecification).Value;
            literalTarget.AgentSpecification = spec;
        }

        // Note! The "vmImage" token of the agent spec is currently treated specially.
        // This is a temporary relationship that allows vmImage agent specs to specify
        // the hosted pool to use.
        // It would be better to factor out this work into a separate, plug-in validator.
        if (String.IsNullOrEmpty(qname) && spec != null)
        {
            const string VMImage = "vmImage"; // should be: YamlConstants.VMImage, which is inaccessible :(
            spec.TryGetValue(VMImage, out var token);
            if (token != null && token.Type == JTokenType.String)
            {
                var rawTokenValue = token.Value<String>();
                var resolvedPoolName = PoolNameForVMImage(rawTokenValue);
                if (resolvedPoolName == null)
                {
                    result.Errors.Add(new PipelineValidationError($"Unexpected vmImage '{rawTokenValue}'"));
                    return null;
                }
                else
                {
                    // Translate the vmImage into a hosted queue reference and drop
                    // the token so downstream consumers do not see it twice.
                    spec.Remove(VMImage);
                    literalTarget.Queue = new AgentQueueReference
                    {
                        Name = resolvedPoolName
                    };
                }
            }
        }
        else
        {
            // NOTE(review): if Queue is null here (no queue, no agent spec) this
            // dereference would throw — confirm callers guarantee Queue is set on
            // this path.
            literalTarget.Queue.Name = qname;
        }
        return literalTarget;
    }

    /// <summary>
    /// returns true for strings structured like expressions or macros.
    /// they could techincally be literals though.
    /// </summary>
    internal static Boolean IsProbablyExpressionOrMacro(String s)
    {
        return ExpressionValue.IsExpression(s) || VariableUtility.IsVariable(s);
    }

    /// <summary>
    /// returns true if this model is composed only of literal values (no expressions)
    /// </summary>
    internal Boolean IsLiteral()
    {
        var queue = this.Queue;
        if (queue != null)
        {
            var queueName = queue.Name;
            if (queueName != null)
            {
                if (!queueName.IsLiteral || VariableUtility.IsVariable(queueName.Literal))
                {
                    return false;
                }
            }
        }

        var spec = this.AgentSpecification;
        if (spec != null)
        {
            // Recursively scan the spec: any string token that looks like an
            // expression/macro makes the whole model non-literal.
            bool IsLiteral(JObject o)
            {
                foreach (var pair in o)
                {
                    switch (pair.Value.Type)
                    {
                        case JTokenType.String:
                            if (IsProbablyExpressionOrMacro(pair.Value.Value<String>()))
                            {
                                return false;
                            }
                            break;
                        case JTokenType.Object:
                            if (!IsLiteral(pair.Value.Value<JObject>()))
                            {
                                return false;
                            }
                            break;
                        default:
                            break;
                    }
                }
                return true;
            }
            if (!IsLiteral(spec))
            {
                return false;
            }
        }
        return true;
    }

    /// <summary>
    /// Temporary code to translate vmImage. Pool providers work will move this to a different layer
    /// </summary>
    /// <param name="vmImageValue">Raw vmImage value from the agent specification; null tolerated.</param>
    /// <returns>Hosted pool name, or null when the value is not recognized.</returns>
    internal static String PoolNameForVMImage(String vmImageValue)
    {
        switch ((vmImageValue ?? String.Empty).ToUpperInvariant())
        {
            case "UBUNTU 16.04":
            case "UBUNTU-16.04":
            case "UBUNTU LATEST":
            case "UBUNTU-LATEST":
                return "Hosted Ubuntu 1604";
            case "UBUNTU 18.04":
            case "UBUNTU-18.04":
                return "Hosted Ubuntu 1804";
            case "VISUAL STUDIO 2015 ON WINDOWS SERVER 2012R2":
            case "VS2015-WIN2012R2":
                return "Hosted";
            case "VISUAL STUDIO 2017 ON WINDOWS SERVER 2016":
            case "VS2017-WIN2016":
                return "Hosted VS2017";
            case "WINDOWS-2019-VS2019":
            case "WINDOWS-2019":
            case "WINDOWS LATEST":
            case "WINDOWS-LATEST":
                return "Hosted Windows 2019 with VS2019";
            case "WINDOWS SERVER 1803":
            case "WIN1803":
                return "Hosted Windows Container";
            case "MACOS 10.13":
            case "MACOS-10.13":
            case "XCODE 9 ON MACOS 10.13":
            case "XCODE9-MACOS10.13":
            case "XCODE 10 ON MACOS 10.13":
            case "XCODE10-MACOS10.13":
                return "Hosted macOS High Sierra";
            case "MACOS 10.14":
            case "MACOS-10.14":
            case "MACOS LATEST":
            case "MACOS-LATEST":
                return "Hosted macOS";
            default:
                return null;
        }
    }

    /// <summary>
    /// PipelineBuildContexts have build options.
    /// GraphExecutionContexts have dependencies.
    /// We might need either depending on the situation.
    /// Validates the queue reference (recording errors/unauthorized resources on
    /// <paramref name="result"/>) and returns the resolved pool when available.
    /// </summary>
    private TaskAgentPoolReference ValidateQueue(
        IPipelineContext context,
        ValidationResult result,
        BuildOptions buildOptions)
    {
        var queueId = 0;
        var queueName = (String)null;
        var queueNameIsUnresolvableExpression = false; // true iff Name is an expression, we're allowed to use them, and it has no current value
        var queue = this.Queue;
        if (queue != null)
        {
            queueId = queue.Id;

            // resolve name
            var expressionValueName = queue.Name;
            if (expressionValueName != null && (buildOptions.EnableResourceExpressions || expressionValueName.IsLiteral))
            {
                // resolve expression
                try
                {
                    queueName = expressionValueName.GetValue(context).Value;
                    queueNameIsUnresolvableExpression = !expressionValueName.IsLiteral && String.IsNullOrEmpty(queueName);
                }
                catch (Exception ee)
                {
                    // something bad happened trying to fetch the value.
                    // We do not really care what though. Just record the error and move on.
                    queueName = null;
                    if (buildOptions.ValidateExpressions && buildOptions.ValidateResources)
                    {
                        result.Errors.Add(new PipelineValidationError(ee.Message));
                    }
                }

                // resolve name macro
                if (buildOptions.EnableResourceExpressions && queueName != null && VariableUtility.IsVariable(queueName))
                {
                    queueName = context.ExpandVariables(queueName);
                    if (VariableUtility.IsVariable(queueName))
                    {
                        // name appears to be a macro that is not defined.
                        queueNameIsUnresolvableExpression = true;
                    }
                }
            }
        }

        if (queueNameIsUnresolvableExpression || (queueId == 0 && String.IsNullOrEmpty(queueName)))
        {
            // could not determine what queue user was talking about
            if (!buildOptions.AllowEmptyQueueTarget && buildOptions.ValidateResources)
            {
                // expression-based queue names are allowed to be unresolved at compile time.
                // TEMPORARY: literal queue names do not error at compile time if special keys exist
                if (!queueNameIsUnresolvableExpression || buildOptions.ValidateExpressions)
                {
                    if (!String.IsNullOrEmpty(queueName))
                    {
                        result.Errors.Add(new PipelineValidationError(PipelineStrings.QueueNotFoundByName(queueName)));
                    }
                    else
                    {
                        var expressionValueName = queue?.Name;
                        if (expressionValueName == null || expressionValueName.IsLiteral)
                        {
                            result.Errors.Add(new PipelineValidationError(PipelineStrings.QueueNotDefined()));
                        }
                        // NOTE(review): the null check below is redundant — this
                        // branch is only reachable when expressionValueName != null.
                        else if (expressionValueName != null)
                        {
                            result.Errors.Add(new PipelineValidationError(PipelineStrings.QueueNotFoundByName(expressionValueName.Expression)));
                        }
                    }
                }
            }
        }
        else
        {
            // we have a valid queue. record the reference
            result.AddQueueReference(id: queueId, name: queueName);

            // Attempt to resolve the queue using any identifier specified. We will look up by either ID
            // or name and the ID is preferred since it is immutable and more specific.
            if (buildOptions.ValidateResources)
            {
                TaskAgentQueue taskAgentQueue = null;
                var resourceStore = context.ResourceStore;
                if (resourceStore != null)
                {
                    if (queueId != 0)
                    {
                        taskAgentQueue = resourceStore.GetQueue(queueId);
                        if (taskAgentQueue == null)
                        {
                            result.UnauthorizedResources.Queues.Add(new AgentQueueReference { Id = queueId });
                            result.Errors.Add(new PipelineValidationError(PipelineStrings.QueueNotFound(queueId)));
                        }
                    }
                    else if (!String.IsNullOrEmpty(queueName))
                    {
                        taskAgentQueue = resourceStore.GetQueue(queueName);
                        if (taskAgentQueue == null)
                        {
                            result.UnauthorizedResources.Queues.Add(new AgentQueueReference { Name = queueName });
                            result.Errors.Add(new PipelineValidationError(PipelineStrings.QueueNotFoundByName(queueName)));
                        }
                    }
                }

                // Store the resolved values inline to the resolved resource for this validation run
                if (taskAgentQueue != null)
                {
                    this.Queue.Id = taskAgentQueue.Id;
                    return taskAgentQueue.Pool;
                }
            }
        }
        return null;
    }

    /// <summary>
    /// Validates the queue and the step list (checkout placement and repository
    /// restrictions), then merges step/task demands into this target's demands,
    /// keeping at most one minimum-agent-version demand.
    /// </summary>
    internal override void Validate(
        IPipelineContext context,
        BuildOptions buildOptions,
        ValidationResult result,
        IList<Step> steps,
        ISet<Demand> taskDemands)
    {
        // validate queue
        var resolvedPool = ValidateQueue(context, result, buildOptions);
        Boolean includeTaskDemands = resolvedPool == null || !resolvedPool.IsHosted;

        // Add advanced-checkout min agent demand
        Boolean advancedCheckout = false;
        int checkoutTasks = 0;
        int injectedSystemTasks = 0;
        // Injected "__system_" tasks only count while they appear as a contiguous
        // prefix of the step list; the first user task stops the count.
        bool countInjectSystemTasks = true;
        for (int index = 0; index < steps.Count; index++)
        {
            var step = steps[index];

            // Task
            if (step.Type == StepType.Task)
            {
                var task = step as TaskStep;
                if (task.Name.StartsWith("__system_"))
                {
                    if (countInjectSystemTasks)
                    {
                        injectedSystemTasks++;
                    }
                }
                else if (task.IsCheckoutTask())
                {
                    countInjectSystemTasks = false;
                    checkoutTasks++;
                    if (context.EnvironmentVersion < 2)
                    {
                        // v1 environments: checkout must be the first non-injected step.
                        if (index > 0 && index - injectedSystemTasks > 0)
                        {
                            result.Errors.Add(new PipelineValidationError(PipelineStrings.CheckoutMustBeTheFirstStep()));
                        }
                    }
                    else
                    {
                        // v2+ environments: a non-first checkout is allowed but
                        // requires a newer agent (see advancedCheckout demand below).
                        if (index > 0)
                        {
                            advancedCheckout = true;
                        }
                    }

                    // Only self/none/designer repositories may be checked out here.
                    if (task.Inputs.TryGetValue(PipelineConstants.CheckoutTaskInputs.Repository, out String repository) &&
                        !String.Equals(repository, PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase) &&
                        !String.Equals(repository, PipelineConstants.NoneAlias, StringComparison.OrdinalIgnoreCase) &&
                        !String.Equals(repository, PipelineConstants.DesignerRepo, StringComparison.OrdinalIgnoreCase))
                    {
                        result.Errors.Add(new PipelineValidationError(PipelineStrings.CheckoutStepRepositoryNotSupported(task.Inputs[PipelineConstants.CheckoutTaskInputs.Repository])));
                    }
                }
                else
                {
                    countInjectSystemTasks = false;
                }
            }
        }

        if (checkoutTasks > 1)
        {
            result.Errors.Add(new PipelineValidationError(PipelineStrings.CheckoutMultipleRepositoryNotSupported()));
        }

        if (advancedCheckout)
        {
            taskDemands.Add(new DemandMinimumVersion(PipelineConstants.AgentVersionDemandName, PipelineConstants.AdvancedCheckoutMinAgentVersion));
        }

        // Now we need to ensure we have only a single demand for the mimimum agent version. We effectively remove
        // every agent version demand we find and keep track of the one with the highest value. Assuming we located
        // one or more of these demands we will ensure it is merged in at the end.
        var minimumAgentVersionDemand = ResolveAgentVersionDemand(taskDemands);
        minimumAgentVersionDemand = ResolveAgentVersionDemand(this.Demands, minimumAgentVersionDemand);

        // not include demands from task if phase is running inside container
        // container suppose provide any required tool task needs
        if (this.Container != null)
        {
            includeTaskDemands = false;
        }

        // Merge the phase demands with the implicit demands from tasks.
        if (includeTaskDemands && buildOptions.RollupStepDemands)
        {
            this.Demands.UnionWith(taskDemands);
        }

        // If we resolved a minimum agent version demand then we go ahead and merge it in
        // We want to do this even if targetting Hosted
        if (minimumAgentVersionDemand != null)
        {
            this.Demands.Add(minimumAgentVersionDemand);
        }
    }

    /// <summary>
    /// Removes any minimum-agent-version demand from <paramref name="demands"/> and
    /// returns the higher of that demand and <paramref name="currentMinimumVersion"/>.
    /// </summary>
    private static DemandMinimumVersion ResolveAgentVersionDemand(
        ISet<Demand> demands,
        DemandMinimumVersion currentMinimumVersion = null)
    {
        var minVersionDemand = DemandMinimumVersion.MaxAndRemove(demands);
        if (minVersionDemand != null && (currentMinimumVersion == null || DemandMinimumVersion.CompareVersion(minVersionDemand.Value, currentMinimumVersion.Value) > 0))
        {
            return minVersionDemand;
        }
        else
        {
            return currentMinimumVersion;
        }
    }

    /// <summary>
    /// Delegates job-context creation to the configured parallel execution options
    /// and applies this target's workspace options to the created job definition.
    /// </summary>
    internal override JobExecutionContext CreateJobContext(
        PhaseExecutionContext context,
        String jobName,
        Int32 attempt,
        Boolean continueOnError,
        Int32 timeoutInMinutes,
        Int32 cancelTimeoutInMinutes,
        IJobFactory jobFactory)
    {
        context.Trace?.EnterProperty("CreateJobContext");
        var execution = this.Execution ?? new ParallelExecutionOptions();
        var jobContext = execution.CreateJobContext(
            context,
            jobName,
            attempt,
            this.Container,
            this.SidecarContainers,
            continueOnError,
            timeoutInMinutes,
            cancelTimeoutInMinutes,
            jobFactory);
        context.Trace?.LeaveProperty("CreateJobContext");

        if (jobContext != null)
        {
            jobContext.Job.Definition.Workspace = this.Workspace?.Clone();
        }
        return jobContext;
    }

    /// <summary>
    /// Delegates phase expansion to the configured parallel execution options and
    /// applies this target's workspace options to every expanded job.
    /// </summary>
    internal override ExpandPhaseResult Expand(
        PhaseExecutionContext context,
        Boolean continueOnError,
        Int32 timeoutInMinutes,
        Int32 cancelTimeoutInMinutes,
        IJobFactory jobFactory,
        JobExpansionOptions options)
    {
        context.Trace?.EnterProperty("Expand");
        var execution = this.Execution ?? new ParallelExecutionOptions();
        var result = execution.Expand(
            context,
            this.Container,
            this.SidecarContainers,
            continueOnError,
            timeoutInMinutes,
            cancelTimeoutInMinutes,
            jobFactory,
            options);
        context.Trace?.LeaveProperty("Expand");

        foreach (var job in result.Jobs)
        {
            job.Definition.Workspace = this.Workspace?.Clone();
        }
        return result;
    }

    // Normalize an empty sidecar map to null so it is omitted from the wire format
    // (DataMember uses EmitDefaultValue = false).
    [OnSerializing]
    private void OnSerializing(StreamingContext context)
    {
        if (m_sidecarContainers?.Count == 0)
        {
            m_sidecarContainers = null;
        }
    }

    [DataMember(Name = "SidecarContainers", EmitDefaultValue = false)]
    private IDictionary<String, ExpressionValue<String>> m_sidecarContainers;

    /// <summary>
    /// Ensures conversion of a TaskAgentQueue into an AgentQueueReference works properly when the serializer
    /// is configured to write/honor type information. This is a temporary converter that may be removed after
    /// M127 ships.
    /// </summary>
    private sealed class QueueJsonConverter : VssSecureJsonConverter
    {
        // Read-only converter: serialization uses the default writer.
        public override Boolean CanWrite => false;

        public override Boolean CanConvert(Type objectType)
        {
            return objectType.Equals(typeof(AgentQueueReference));
        }

        public override Object ReadJson(
            JsonReader reader,
            Type objectType,
            Object existingValue,
            JsonSerializer serializer)
        {
            // Load the raw payload and repopulate into a fresh reference so any
            // embedded type information from TaskAgentQueue is ignored.
            var rawValue = JObject.Load(reader);
            using (var objectReader = rawValue.CreateReader())
            {
                var newValue = new AgentQueueReference();
                serializer.Populate(objectReader, newValue);
                return newValue;
            }
        }

        public override void WriteJson(
            JsonWriter writer,
            Object value,
            JsonSerializer serializer)
        {
            // Unreachable: CanWrite is false.
            throw new NotImplementedException();
        }
    }
}
}

View File

@@ -1,15 +0,0 @@
using System;
namespace GitHub.DistributedTask.Pipelines.Artifacts
{
/// <summary>
/// Well-known constants for pipeline artifacts.
/// </summary>
public static class ArtifactConstants
{
    /// <summary>
    /// Artifact type names; each constant's value equals its member name via nameof.
    /// </summary>
    internal static class ArtifactType
    {
        internal const String Build = nameof(Build);
        internal const String Container = nameof(Container);
        internal const String Package = nameof(Package);
        internal const String SourceControl = nameof(SourceControl);
    }
}
}

View File

@@ -1,150 +0,0 @@
using System;
using System.Collections.Generic;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.Artifacts;
namespace GitHub.DistributedTask.Orchestration.Server.Artifacts
{
/// <summary>
/// Extension helpers for classifying download/downloadBuild steps and for
/// merging artifact input dictionaries.
/// </summary>
public static class DownloadStepExtensions
{
    /// <summary>
    /// Returns true when any step in the list is a downloadBuild task step.
    /// </summary>
    public static Boolean IsDownloadBuildStepExists(this IReadOnlyList<JobStep> steps)
    {
        foreach (var candidate in steps)
        {
            if (candidate is TaskStep downloadCandidate && downloadCandidate.IsDownloadBuildTask())
            {
                return true;
            }
        }
        return false;
    }

    /// <summary>
    /// Returns true when the step is a task step referencing the downloadBuild task by name.
    /// </summary>
    public static Boolean IsDownloadBuildTask(this Step step)
    {
        return step is TaskStep taskStep
            && taskStep.Reference != null
            && taskStep.Reference.Name.Equals(YamlArtifactConstants.DownloadBuild, StringComparison.OrdinalIgnoreCase);
    }

    /// <summary>
    /// Returns true when a download or downloadBuild step uses the "none" alias,
    /// i.e. the author explicitly disabled the download.
    /// </summary>
    public static Boolean IsDownloadStepDisabled(this Step step)
    {
        return step is TaskStep taskStep
            && taskStep.Inputs.TryGetValue(PipelineArtifactConstants.DownloadTaskInputs.Alias, out String alias)
            && String.Equals(alias, YamlArtifactConstants.None, StringComparison.OrdinalIgnoreCase)
            && (step.IsDownloadBuildTask() || step.IsDownloadTask());
    }

    /// <summary>
    /// Returns true when the step references the built-in Download task by id and version.
    /// </summary>
    public static Boolean IsDownloadTask(this Step step)
    {
        return step is TaskStep taskStep
            && taskStep.Reference != null
            && taskStep.Reference.Id.Equals(PipelineArtifactConstants.DownloadTask.Id)
            && taskStep.Reference.Version == PipelineArtifactConstants.DownloadTask.Version;
    }

    /// <summary>
    /// Returns true when the step is a Download task targeting the "current" pipeline.
    /// </summary>
    public static Boolean IsDownloadCurrentPipelineArtifactStep(this Step step)
    {
        return step is TaskStep taskStep
            && taskStep.IsDownloadTask()
            && taskStep.Inputs.TryGetValue(PipelineArtifactConstants.DownloadTaskInputs.Alias, out String alias)
            && String.Equals(alias, YamlArtifactConstants.Current, StringComparison.OrdinalIgnoreCase);
    }

    /// <summary>
    /// Returns true when this Download task step uses the "none" alias.
    /// </summary>
    public static Boolean IsDownloadPipelineArtifactStepDisabled(this TaskStep step)
    {
        return step.IsDownloadTask()
            && step.Inputs.TryGetValue(PipelineArtifactConstants.DownloadTaskInputs.Alias, out String alias)
            && String.Equals(alias, YamlArtifactConstants.None, StringComparison.OrdinalIgnoreCase);
    }

    /// <summary>
    /// Returns true when this Download task step targets an external pipeline,
    /// i.e. its alias is present and is neither "current" nor "none".
    /// </summary>
    public static Boolean IsDownloadExternalPipelineArtifactStep(this TaskStep step)
    {
        return step.IsDownloadTask()
            && step.Inputs != null
            && step.Inputs.TryGetValue(PipelineArtifactConstants.DownloadTaskInputs.Alias, out String alias)
            && !String.IsNullOrEmpty(alias)
            && !alias.Equals(YamlArtifactConstants.Current, StringComparison.OrdinalIgnoreCase)
            && !alias.Equals(YamlArtifactConstants.None, StringComparison.OrdinalIgnoreCase);
    }

    /// <summary>
    /// Returns the step's alias input, or the empty string when no alias is set.
    /// </summary>
    public static String GetAliasFromTaskStep(this TaskStep step)
    {
        if (step.Inputs.TryGetValue(PipelineArtifactConstants.DownloadTaskInputs.Alias, out String alias))
        {
            return alias;
        }
        return String.Empty;
    }

    /// <summary>
    /// Returns true when any step in the list is a Download task step.
    /// </summary>
    public static Boolean IsDownloadPipelineArtifactStepExists(this IReadOnlyList<JobStep> steps)
    {
        foreach (var candidate in steps)
        {
            if (candidate is TaskStep downloadCandidate && downloadCandidate.IsDownloadTask())
            {
                return true;
            }
        }
        return false;
    }

    /// <summary>
    /// Copies every entry of <paramref name="second"/> into <paramref name="first"/>,
    /// overwriting duplicates. A null <paramref name="second"/> is a no-op.
    /// </summary>
    public static void Merge(
        this IDictionary<String, String> first,
        IDictionary<String, String> second)
    {
        if (second == null)
        {
            return;
        }
        foreach (var key in second.Keys)
        {
            first[key] = second[key];
        }
    }

    /// <summary>
    /// Copies every entry of the read-only <paramref name="second"/> into
    /// <paramref name="first"/>, overwriting duplicates. A null source is a no-op.
    /// </summary>
    public static void Merge(
        this IDictionary<String, String> first,
        IReadOnlyDictionary<String, String> second)
    {
        if (second == null)
        {
            return;
        }
        foreach (var key in second.Keys)
        {
            first[key] = second[key];
        }
    }
}
}

View File

@@ -1,49 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
namespace GitHub.DistributedTask.Pipelines.Artifacts
{
/// <summary>
/// Provides a mechanism to resolve the artifacts
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IArtifactResolver
{
    /// <summary>
    /// Given a resource, it gets the corresponding task id from its extension
    /// </summary>
    /// <param name="resource">The artifact resource to resolve a download task for.</param>
    /// <returns>The id of the task that downloads this kind of artifact.</returns>
    Guid GetArtifactDownloadTaskId(Resource resource);

    /// <summary>
    /// Given a resource and step, it maps the resource properties to task inputs
    /// </summary>
    /// <param name="resource">The artifact resource supplying property values.</param>
    /// <param name="taskStep">The task step whose inputs are populated in place.</param>
    void PopulateMappedTaskInputs(Resource resource, TaskStep taskStep);

    /// <summary>
    /// Given an artifact step, it resolves the artifact and returns a download artifact task
    /// </summary>
    /// <param name="pipelineContext">Context used during resolution.</param>
    /// <param name="step">The artifact step to resolve.</param>
    /// <param name="resolvedSteps">Receives the resolved download task step(s).</param>
    /// <returns>True when the step was resolved successfully.</returns>
    Boolean ResolveStep(IPipelineContext pipelineContext, JobStep step, out IList<TaskStep> resolvedSteps);

    /// <summary>
    /// Given resource store and task step it translate the taskStep into actual task reference with mapped inputs
    /// </summary>
    /// <param name="resourceStore">Store used to look up the referenced resource.</param>
    /// <param name="taskStep">The task step rewritten in place.</param>
    /// <param name="errorMessage">Receives a description of the failure, when any.</param>
    /// <returns>True when the step was translated successfully.</returns>
    Boolean ResolveStep(IResourceStore resourceStore, TaskStep taskStep, out String errorMessage);

    /// <summary>
    /// Validate the given resource in the YAML file. Also resolve version for the resource if not resolved already
    /// </summary>
    /// <param name="resource">The declared resource to validate.</param>
    /// <param name="error">Receives the validation error, when any.</param>
    /// <returns>True when the resource is valid.</returns>
    Boolean ValidateDeclaredResource(Resource resource, out PipelineValidationError error);
}
}

View File

@@ -1,113 +0,0 @@
using System;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines.Artifacts
{
/// <summary>
/// Constants and the built-in Download task definition used by pipeline artifacts.
/// </summary>
public static class PipelineArtifactConstants
{
    // Defaults applied when a download step does not specify a path/pattern.
    internal static class CommonArtifactTaskInputValues
    {
        internal const String DefaultDownloadPath = "$(Pipeline.Workspace)";
        internal const String DefaultDownloadPattern = "**";
    }

    /// <summary>
    /// Input names of the DownloadPipelineArtifact task.
    /// </summary>
    public static class PipelineArtifactTaskInputs
    {
        public const String ArtifactName = "artifactName";
        public const String BuildType = "buildType";
        public const String BuildId = "buildId";
        public const String BuildVersionToDownload = "buildVersionToDownload";
        public const String Definition = "definition";
        public const String DownloadType = "downloadType";
        public const String DownloadPath = "downloadPath";
        public const String FileSharePath = "fileSharePath";
        public const String ItemPattern = "itemPattern";
        public const String Project = "project";
    }

    /// <summary>
    /// Well-known values for the inputs above.
    /// </summary>
    public static class PipelineArtifactTaskInputValues
    {
        public const String DownloadTypeSingle = "single";
        public const String SpecificBuildType = "specific";
        public const String CurrentBuildType = "current";
        public const String AutomaticMode = "automatic";
        public const String ManualMode = "manual";
    }

    // YAML shorthand keywords recognized in artifact declarations.
    internal static class YamlConstants
    {
        internal const String Connection = "connection";
        internal const String Current = "current";
        internal const String None = "none";
    }

    /// <summary>
    /// Artifact type names.
    /// </summary>
    public static class ArtifactTypes
    {
        public const string AzurePipelineArtifactType = "Pipeline";
    }

    /// <summary>
    /// Input names of the built-in Download task (see <see cref="DownloadTask"/>).
    /// </summary>
    public static class DownloadTaskInputs
    {
        public const String Alias = "alias";
        public const String Artifact = "artifact";
        public const String Mode = "mode";
        public const String Path = "path";
        public const String Patterns = "patterns";
    }

    /// <summary>
    /// Names used for telemetry/tracing of pipeline artifact features.
    /// </summary>
    public static class TraceConstants
    {
        public const String Area = "PipelineArtifacts";
        public const String DownloadPipelineArtifactFeature = "DownloadPipelineArtifact";
    }

    /// <summary>
    /// The built-in Download task definition. The id and version here are the ones
    /// matched by the step-classification helpers (e.g. IsDownloadTask).
    /// </summary>
    public static readonly TaskDefinition DownloadTask = new TaskDefinition
    {
        Id = new Guid("30f35852-3f7e-4c0c-9a88-e127b4f97211"),
        Name = "Download",
        FriendlyName = "Download Artifact",
        Author = "Microsoft",
        RunsOn = { TaskRunsOnConstants.RunsOnAgent },
        Version = new TaskVersion("1.0.0"),
        Description = "Downloads pipeline type artifacts.",
        HelpMarkDown = "[More Information](https://github.com)",
        Inputs = {
            new TaskInputDefinition()
            {
                Name = DownloadTaskInputs.Artifact,
                Required = true,
                InputType = TaskInputType.String
            },
            new TaskInputDefinition()
            {
                Name = DownloadTaskInputs.Patterns,
                Required = false,
                DefaultValue = "**",
                InputType = TaskInputType.String
            },
            new TaskInputDefinition()
            {
                Name = DownloadTaskInputs.Path,
                Required = false,
                InputType = TaskInputType.String
            },
            new TaskInputDefinition()
            {
                Name = DownloadTaskInputs.Alias,
                Required = false,
                InputType = TaskInputType.String
            }
        },
    };
}
}

View File

@@ -1,16 +0,0 @@
using System;
namespace GitHub.DistributedTask.Pipelines.Artifacts
{
/// <summary>
/// YAML keywords used by artifact download shortcut syntax
/// (e.g. the "download"/"downloadBuild" steps and their aliases).
/// </summary>
public static class YamlArtifactConstants
{
    public const String Alias = "alias";
    public const String Connection = "connection";
    public const String Current = "current";
    public const String Download = "download";
    public const String DownloadBuild = "downloadBuild";
    public const String None = "none";
    public const String Path = "path";
    public const String Patterns = "patterns";
}
}

View File

@@ -1,119 +0,0 @@
using System;
using System.ComponentModel;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides a mechanism for controlling validation behaviors.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public class BuildOptions
{
    /// <summary>A shared instance with every option left at its default (disabled).</summary>
    public static BuildOptions None { get; } = new BuildOptions();

    /// <summary>
    /// When false, a queue target without a queue is considered an error.
    /// </summary>
    public Boolean AllowEmptyQueueTarget { get; set; }

    /// <summary>
    /// Allow hyphens in names checked by the NameValidator. Used for yaml workflow schema.
    /// </summary>
    public Boolean AllowHyphenNames { get; set; }

    /// <summary>
    /// Whether to demand the latest agent version.
    /// </summary>
    public Boolean DemandLatestAgent { get; set; }

    /// <summary>
    /// If true, resource definitions are allowed to use expressions.
    /// </summary>
    public Boolean EnableResourceExpressions { get; set; }

    /// <summary>
    /// Whether to resolve resource versions.
    /// </summary>
    public Boolean ResolveResourceVersions { get; set; }

    /// <summary>
    /// Whether input aliases defined in a task definition are honored.
    /// </summary>
    public Boolean ResolveTaskInputAliases { get; set; }

    /// <summary>
    /// Whether individual step demands are rolled up into the parent phase's
    /// demands, making each phase's demand set a superset of its children's.
    /// </summary>
    public Boolean RollupStepDemands { get; set; }

    /// <summary>
    /// If true, all expressions must be resolvable given a provided context.
    /// Normally false at plan compile time and true at plan runtime.
    /// </summary>
    public Boolean ValidateExpressions { get; set; }

    /// <summary>
    /// Whether to validate resource existence and other constraints.
    /// </summary>
    public Boolean ValidateResources { get; set; }

    /// <summary>
    /// Whether caller-provided step names are validated for correctness and
    /// uniqueness. When false, invalid names are fixed up and duplicates are
    /// de-duplicated automatically, which may lead to unexpected behavior at
    /// runtime when binding output variables.
    /// </summary>
    public Boolean ValidateStepNames { get; set; }

    /// <summary>
    /// Whether to run input validation defined by the task author.
    /// </summary>
    public Boolean ValidateTaskInputs { get; set; }
}
}

View File

@@ -1,68 +0,0 @@
using System;
using System.ComponentModel;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Well-known property-bag keys used by <see cref="BuildResource"/>.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public static class BuildPropertyNames
{
public static readonly String Branch = "branch";
public static readonly String Connection = "connection";
public static readonly String Source = "source";
public static readonly String Type = "type";
public static readonly String Version = "version";
}
/// <summary>
/// Provides a data contract for a build resource referenced by a pipeline.
/// </summary>
/// <remarks>
/// Typed properties below are stored in the inherited <c>Properties</c> bag
/// using the keys declared in <see cref="BuildPropertyNames"/>.
/// </remarks>
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public class BuildResource : Resource
{
public BuildResource()
{
}
// Copy constructor used by Clone.
protected BuildResource(BuildResource resourceToCopy)
: base(resourceToCopy)
{
}
/// <summary>
/// Gets or sets the type of build resource.
/// </summary>
public String Type
{
get
{
return this.Properties.Get<String>(BuildPropertyNames.Type);
}
set
{
this.Properties.Set(BuildPropertyNames.Type, value);
}
}
/// <summary>
/// Gets or sets the version of the build resource.
/// </summary>
public String Version
{
get
{
return this.Properties.Get<String>(BuildPropertyNames.Version);
}
set
{
this.Properties.Set(BuildPropertyNames.Version, value);
}
}
/// <summary>
/// Creates a deep copy of this resource.
/// </summary>
public BuildResource Clone()
{
return new BuildResource(this);
}
}
}

View File

@@ -1,63 +0,0 @@
using System;
using System.ComponentModel;
using System.Runtime.Serialization;
using GitHub.Services.WebApi.Internal;
namespace GitHub.DistributedTask.Pipelines.Checkpoints
{
/// <summary>
/// Context supplied to a checkpoint evaluator, identifying the checkpoint
/// and the orchestration scopes (project / pipeline / graph node) that are
/// requesting a decision.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[DataContract]
[ClientIgnore]
public class CheckpointContext
{
/// <summary>
/// Unique id of the checkpoint, also used as the timeline record id
/// </summary>
[DataMember(IsRequired = true)]
public Guid Id { get; set; }
/// <summary>
/// Auth token for querying DistributedTask
/// </summary>
[DataMember(IsRequired = true)]
public String Token { get; set; }
/// <summary>
/// Checkpoint Instance Id
/// Use this for sending decision events and tracing telemetry.
/// </summary>
[DataMember(IsRequired = true)]
public String OrchestrationId { get; set; }
/// <summary>
/// PlanId
/// </summary>
[DataMember(IsRequired = true)]
public Guid PlanId { get; set; }
/// <summary>
/// Which TaskHub to use when sending decision events;
/// Use this for sending decision events.
/// </summary>
[DataMember(IsRequired = true)]
public String HubName { get; set; }
/// <summary>
/// The project requesting decision.
/// </summary>
[DataMember(EmitDefaultValue = false)]
public CheckpointScope Project { get; set; }
/// <summary>
/// The pipeline (definition) requesting decision.
/// </summary>
[DataMember(EmitDefaultValue = false)]
public PipelineScope Pipeline { get; set; }
/// <summary>
/// The graph node requesting decision.
/// </summary>
[DataMember(EmitDefaultValue = false)]
public GraphNodeScope GraphNode { get; set; }
}
}

View File

@@ -1,36 +0,0 @@
using System;
using System.ComponentModel;
using System.Runtime.Serialization;
using GitHub.Services.WebApi.Internal;
namespace GitHub.DistributedTask.Pipelines.Checkpoints
{
/// <summary>
/// A decision rendered for a checkpoint. <see cref="Result"/> is expected to be
/// one of the constants declared below (Approved / Denied / Canceled).
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[DataContract]
[ClientIgnore]
public class CheckpointDecision
{
/// <summary>
/// Checkpoint id, provided on context
/// </summary>
[DataMember(IsRequired = true)]
public Guid Id { get; set; }
/// <summary>
/// Decision
/// </summary>
[DataMember(IsRequired = true)]
public String Result { get; set; }
/// <summary>
/// Additional information (optional)
/// </summary>
[DataMember(IsRequired = false, EmitDefaultValue = false)]
public String Message { get; set; }
// Decision possibilities
public const String Approved = "Approved";
public const String Denied = "Denied";
public const String Canceled = "Canceled";
}
}

View File

@@ -1,56 +0,0 @@
using System;
using System.ComponentModel;
using System.Runtime.Serialization;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.WebApi.Internal;
namespace GitHub.DistributedTask.Pipelines.Checkpoints
{
/// <summary>
/// Provides context regarding the state of the orchestration.
/// Consumers may choose to use this information to cache decisions.
/// EG, if you wanted to return the same decision for this and all
/// future requests issuing from the same project / pipeline / stage / run
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[DataContract]
[ClientIgnore]
public class CheckpointScope
{
/// <summary>
/// May be used to uniquely identify this scope for future reference.
/// </summary>
[DataMember(IsRequired = true)]
public String Id { get; set; }
/// <summary>
/// The friendly name of the scope
/// </summary>
[DataMember(EmitDefaultValue = false)]
public String Name { get; set; }
}
/// <summary>
/// Scope describing the graph node requesting a decision.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[DataContract]
[ClientIgnore]
public class GraphNodeScope : CheckpointScope
{
/// <summary>
/// Facilitates approving only a single attempt of a graph node in a specific run of a pipeline.
/// </summary>
[DataMember(IsRequired = true)]
public Int32 Attempt { get; set; } = 1;
}
/// <summary>
/// Scope describing the pipeline (definition) requesting a decision.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[DataContract]
[ClientIgnore]
public class PipelineScope : CheckpointScope
{
/// <summary>
/// Pipeline URLs
/// </summary>
[DataMember(IsRequired = true)]
public TaskOrchestrationOwner Owner { get; set; }
}
}

View File

@@ -1,22 +0,0 @@
using System;
using System.ComponentModel;
using System.Runtime.Serialization;
using GitHub.Services.WebApi.Internal;
namespace GitHub.DistributedTask.Pipelines.Checkpoints
{
/// <summary>
/// Identifying information for a resource (id, name, and type name).
/// All members are optional and omitted from the serialized payload when unset.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[DataContract]
[ClientIgnore]
public class ResourceInfo
{
[DataMember(EmitDefaultValue = false)]
public String Id { get; set; }
[DataMember(EmitDefaultValue = false)]
public String Name { get; set; }
[DataMember(EmitDefaultValue = false)]
public String TypeName { get; set; }
}
}

View File

@@ -1,25 +0,0 @@
using System;
using System.ComponentModel;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.Pipelines
{
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class ConditionResult
{
    /// <summary>
    /// The boolean outcome of the condition evaluation.
    /// </summary>
    [DataMember]
    public Boolean Value { get; set; }

    /// <summary>
    /// Optional trace output describing the evaluation; omitted from the
    /// serialized payload when null.
    /// </summary>
    [DataMember(EmitDefaultValue = false)]
    public String Trace { get; set; }
}
}

View File

@@ -1,89 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Trigger that starts a pipeline in response to repository changes,
/// optionally filtered by branch and path.
/// </summary>
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public class ContinuousIntegrationTrigger : PipelineTrigger
{
// CI triggers are enabled by default when constructed.
public ContinuousIntegrationTrigger()
: base(PipelineTriggerType.ContinuousIntegration)
{
Enabled = true;
}
[DataMember(EmitDefaultValue = true)]
public Boolean Enabled
{
get;
set;
}
/// <summary>
/// Indicates whether changes should be batched while another CI pipeline is running.
/// </summary>
/// <remarks>
/// If this is true, then changes submitted while a CI pipeline is running will be batched and built in one new CI pipeline when the current pipeline finishes.
/// If this is false, then a new CI pipeline will be triggered for each change to the repository.
/// </remarks>
[DataMember(EmitDefaultValue = false)]
public Boolean BatchChanges
{
get;
set;
}
/// <summary>
/// A list of filters that describe which branches will trigger pipelines.
/// Created lazily on first access.
/// </summary>
public IList<String> BranchFilters
{
get
{
if (m_branchFilters == null)
{
m_branchFilters = new List<String>();
}
return m_branchFilters;
}
}
/// <summary>
/// A list of filters that describe which paths will trigger pipelines.
/// Created lazily on first access.
/// </summary>
public IList<String> PathFilters
{
get
{
if (m_pathFilters == null)
{
m_pathFilters = new List<String>();
}
return m_pathFilters;
}
}
// Drop empty filter lists before serialization so they are omitted from the
// payload (the backing fields use EmitDefaultValue = false).
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_branchFilters?.Count == 0)
{
m_branchFilters = null;
}
if (m_pathFilters?.Count == 0)
{
m_pathFilters = null;
}
}
[DataMember(Name = "BranchFilters", EmitDefaultValue = false)]
private List<String> m_branchFilters;
[DataMember(Name = "PathFilters", EmitDefaultValue = false)]
private List<String> m_pathFilters;
}
}

View File

@@ -1,61 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using GitHub.Services.Common;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides a default implementation of a counter store.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public class CounterStore : ICounterStore
{
/// <summary>
/// Creates a store optionally seeded with known counter values and an
/// optional resolver used to compute new values on a cache miss.
/// </summary>
public CounterStore(
IDictionary<String, Int32> counters = null,
ICounterResolver resolver = null)
{
if (counters?.Count > 0)
{
m_counters.AddRange(counters);
}
this.Resolver = resolver;
}
// Snapshot of all counter values cached by this store (case-insensitive keys).
public IReadOnlyDictionary<String, Int32> Counters
{
get
{
return m_counters;
}
}
private ICounterResolver Resolver
{
get;
}
/// <summary>
/// Returns the cached value for <paramref name="prefix"/> if present; otherwise
/// increments via the resolver (caching the result) or falls back to the seed.
/// </summary>
public Int32 Increment(
IPipelineContext context,
String prefix,
Int32 seed)
{
// A previously resolved prefix is returned as-is — it is not incremented again.
if (m_counters.TryGetValue(prefix, out Int32 existingValue))
{
return existingValue;
}
Int32 newValue = seed;
if (this.Resolver != null)
{
newValue = this.Resolver.Increment(context, prefix, seed);
m_counters[prefix] = newValue;
}
// NOTE(review): when no resolver is configured the seed is returned without
// being cached, so later calls re-return the seed — confirm this is intended.
return newValue;
}
private readonly Dictionary<String, Int32> m_counters = new Dictionary<String, Int32>(StringComparer.OrdinalIgnoreCase);
}
}

View File

@@ -1,50 +0,0 @@
using System.ComponentModel;
using GitHub.DistributedTask.Pipelines.Runtime;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Pairs a created <see cref="Job"/> with the execution context it was created from.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public struct CreateJobResult
{
public CreateJobResult(
JobExecutionContext context,
Job job)
{
this.Job = job;
this.Context = context;
}
public Job Job
{
get;
}
public JobExecutionContext Context
{
get;
}
}
/// <summary>
/// Pairs a task step with the task definition it was resolved against.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public struct CreateTaskResult
{
public CreateTaskResult(
TaskStep task,
TaskDefinition definition)
{
this.Task = task;
this.Definition = definition;
}
public TaskStep Task
{
get;
}
public TaskDefinition Definition
{
get;
}
}
}

View File

@@ -1,74 +0,0 @@
using System.Runtime.Serialization;
using GitHub.DistributedTask.Pipelines.Validation;
namespace GitHub.DistributedTask.Pipelines
{
// How DeploymentExecutionOptions.RollingValue is interpreted: an absolute
// count (must be > 0) or a percentage (validated as 1-100).
[DataContract]
internal enum DeploymentRollingOption
{
[EnumMember]
Absolute,
[EnumMember]
Percentage
}
/// <summary>
/// Rolling-deployment options: how many targets (absolute or percentage)
/// may be deployed to at once.
/// </summary>
[DataContract]
internal class DeploymentExecutionOptions
{
public DeploymentExecutionOptions()
{
}
// Copy constructor used by Clone.
private DeploymentExecutionOptions(DeploymentExecutionOptions optionsToCopy)
{
this.RollingOption = optionsToCopy.RollingOption;
this.RollingValue = optionsToCopy.RollingValue;
}
[DataMember]
public DeploymentRollingOption RollingOption
{
get;
set;
}
[DataMember(EmitDefaultValue = false)]
public uint RollingValue
{
get;
set;
}
public DeploymentExecutionOptions Clone()
{
return new DeploymentExecutionOptions(this);
}
/// <summary>
/// Appends a validation error when <see cref="RollingValue"/> is out of range
/// for the selected option: Absolute requires a value greater than zero,
/// Percentage requires 1-100. Unknown options are also reported as errors.
/// </summary>
public void Validate(
IPipelineContext context,
ValidationResult result)
{
switch (RollingOption)
{
case DeploymentRollingOption.Absolute:
if (RollingValue == 0)
{
result.Errors.Add(new PipelineValidationError(PipelineStrings.InvalidAbsoluteRollingValue()));
}
break;
case DeploymentRollingOption.Percentage:
if (RollingValue == 0 || RollingValue > 100)
{
result.Errors.Add(new PipelineValidationError(PipelineStrings.InvalidPercentageRollingValue()));
}
break;
default:
result.Errors.Add(new PipelineValidationError(PipelineStrings.InvalidRollingOption(RollingOption)));
break;
}
}
}
}

View File

@@ -1,158 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Serialization;
using GitHub.DistributedTask.Pipelines.Runtime;
using GitHub.DistributedTask.Pipelines.Validation;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Phase target that runs jobs against a deployment group, optionally
/// constrained by tags or explicit target ids.
/// </summary>
[DataContract]
internal class DeploymentGroupTarget : PhaseTarget
{
public DeploymentGroupTarget()
: base(PhaseTargetType.DeploymentGroup)
{
}
// Copy constructor used by Clone; copies the group reference, execution
// options, and (when non-empty) the tag set.
private DeploymentGroupTarget(DeploymentGroupTarget targetToClone)
: base(targetToClone)
{
this.DeploymentGroup = targetToClone.DeploymentGroup?.Clone();
this.Execution = targetToClone.Execution?.Clone();
if (targetToClone.m_tags != null && targetToClone.m_tags.Count > 0)
{
m_tags = new HashSet<String>(targetToClone.m_tags, StringComparer.OrdinalIgnoreCase);
}
}
[DataMember]
public DeploymentGroupReference DeploymentGroup
{
get;
set;
}
// Case-insensitive tag set associated with this target; created lazily.
public ISet<String> Tags
{
get
{
if (m_tags == null)
{
m_tags = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
}
return m_tags;
}
}
/// <summary>
/// Gets targets Ids filter on which deployment should be done.
/// Created lazily.
/// </summary>
public List<Int32> TargetIds
{
get
{
if (m_targetIds == null)
{
m_targetIds = new List<Int32>();
}
return m_targetIds;
}
}
[DataMember(EmitDefaultValue = false)]
public DeploymentExecutionOptions Execution
{
get;
set;
}
public override PhaseTarget Clone()
{
return new DeploymentGroupTarget(this);
}
// A task may run on this target only if it declares the deployment-group
// runs-on value in its RunsOn list.
public override Boolean IsValid(TaskDefinition task)
{
return task.RunsOn.Contains(TaskRunsOnConstants.RunsOnDeploymentGroup, StringComparer.OrdinalIgnoreCase);
}
internal override void Validate(
IPipelineContext context,
BuildOptions buildOptions,
ValidationResult result,
IList<Step> steps,
ISet<Demand> taskDemands)
{
// Only the execution (rolling) options carry target-specific validation rules.
this.Execution?.Validate(context, result);
}
internal override JobExecutionContext CreateJobContext(
PhaseExecutionContext context,
String jobName,
Int32 attempt,
Boolean continueOnError,
Int32 timeoutInMinutes,
Int32 cancelTimeoutInMinutes,
IJobFactory jobFactory)
{
context.Trace?.EnterProperty("CreateJobContext");
// Container and sidecar containers are not used for this target type (nulls).
var result = new ParallelExecutionOptions().CreateJobContext(
context,
jobName,
attempt,
null,
null,
continueOnError,
timeoutInMinutes,
cancelTimeoutInMinutes,
jobFactory);
context.Trace?.LeaveProperty("CreateJobContext");
return result;
}
internal override ExpandPhaseResult Expand(
PhaseExecutionContext context,
Boolean continueOnError,
Int32 timeoutInMinutes,
Int32 cancelTimeoutInMinutes,
IJobFactory jobFactory,
JobExpansionOptions options)
{
context.Trace?.EnterProperty("Expand");
var result = new ParallelExecutionOptions().Expand(
context: context,
container: null,
sidecarContainers: null,
continueOnError: continueOnError,
timeoutInMinutes: timeoutInMinutes,
cancelTimeoutInMinutes: cancelTimeoutInMinutes,
jobFactory: jobFactory,
options: options);
context.Trace?.LeaveProperty("Expand");
return result;
}
// Drop empty collections before serialization so they are omitted from the payload.
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_tags?.Count == 0)
{
m_tags = null;
}
if (m_targetIds?.Count == 0)
{
m_targetIds = null;
}
}
[DataMember(Name = "Tags", EmitDefaultValue = false)]
private ISet<String> m_tags;
[DataMember(Name = "TargetIds")]
private List<Int32> m_targetIds;
}
}

View File

@@ -1,38 +0,0 @@
using System;
using System.ComponentModel;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Reference to an environment by numeric id (and, via the base class, by name).
/// </summary>
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class EnvironmentReference : ResourceReference
{
public EnvironmentReference()
{
}
// Copy constructor used by Clone.
private EnvironmentReference(EnvironmentReference referenceToCopy)
: base(referenceToCopy)
{
this.Id = referenceToCopy.Id;
}
[DataMember(EmitDefaultValue = false)]
public Int32 Id
{
get;
set;
}
public EnvironmentReference Clone()
{
return new EnvironmentReference(this);
}
// Prefer the base representation; fall back to the numeric id when the base
// yields null.
public override String ToString()
{
return base.ToString() ?? this.Id.ToString();
}
}
}

View File

@@ -1,21 +0,0 @@
using System;
using System.ComponentModel;
using System.Runtime.Serialization;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Identifies the environment, and optionally a specific environment resource,
/// targeted by a deployment.
/// </summary>
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public class EnvironmentDeploymentTarget
{
[DataMember]
public Int32 EnvironmentId { get; set; }
[DataMember]
public String EnvironmentName { get; set; }
[DataMember]
public EnvironmentResourceReference Resource { get; set; }
}
}

View File

@@ -1,97 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// In-memory lookup of environments, indexed by id and by name (case-insensitive),
/// with optional fall-through to an <c>IEnvironmentResolver</c> on a cache miss.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public class EnvironmentStore : IEnvironmentStore
{
    public EnvironmentStore(
        IList<EnvironmentInstance> environments,
        IEnvironmentResolver resolver = null)
    {
        m_resolver = resolver;
        m_environmentsByName = new Dictionary<String, EnvironmentInstance>(StringComparer.OrdinalIgnoreCase);
        m_environmentsById = new Dictionary<Int32, EnvironmentInstance>();
        Add(environments?.ToArray());
    }

    /// <summary>
    /// Registers environments, indexing each by id and, when it has a usable
    /// name, by name. Null arrays and null entries are ignored; later additions
    /// with the same key overwrite earlier ones.
    /// </summary>
    public void Add(params EnvironmentInstance[] environments)
    {
        if (environments is null)
        {
            return;
        }
        foreach (var e in environments)
        {
            if (e != null)
            {
                m_environmentsById[e.Id] = e;
                var name = e.Name;
                if (!string.IsNullOrWhiteSpace(name))
                {
                    m_environmentsByName[name] = e;
                }
            }
        }
    }

    /// <summary>
    /// Resolves an environment by name, consulting the resolver (and caching the
    /// result) when it is not already in the store.
    /// </summary>
    public EnvironmentInstance ResolveEnvironment(String name)
    {
        if (!m_environmentsByName.TryGetValue(name, out var environment)
            && m_resolver != null)
        {
            // m_resolver is known non-null here; no null-conditional needed.
            environment = m_resolver.Resolve(name);
            Add(environment);
        }
        return environment;
    }

    /// <summary>
    /// Resolves an environment by id, consulting the resolver (and caching the
    /// result) when it is not already in the store.
    /// </summary>
    public EnvironmentInstance ResolveEnvironment(Int32 id)
    {
        if (!m_environmentsById.TryGetValue(id, out var environment)
            && m_resolver != null)
        {
            environment = m_resolver.Resolve(id);
            Add(environment);
        }
        return environment;
    }

    /// <summary>
    /// Resolves a reference, preferring a literal name when one is present and
    /// falling back to the numeric id otherwise. Returns null for a null reference.
    /// </summary>
    public EnvironmentInstance Get(EnvironmentReference reference)
    {
        if (reference is null)
        {
            return null;
        }
        if (reference.Name?.IsLiteral == true)
        {
            return ResolveEnvironment(reference.Name.Literal);
        }
        return ResolveEnvironment(reference.Id);
    }

    /// <summary>
    /// Returns id/name references for every environment currently in the store.
    /// </summary>
    public IList<EnvironmentReference> GetReferences()
    {
        return m_environmentsById.Values
            .Select(x => new EnvironmentReference
            {
                Id = x.Id,
                Name = x.Name
            })
            .ToList();
    }

    private readonly IEnvironmentResolver m_resolver;
    private readonly IDictionary<String, EnvironmentInstance> m_environmentsByName;
    private readonly IDictionary<Int32, EnvironmentInstance> m_environmentsById;
}
}

View File

@@ -1,107 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides a mechanism for controlling runtime behaviors.
/// </summary>
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public class ExecutionOptions
{
public ExecutionOptions()
{
}
/// <summary>
/// Gets or sets a value indicating whether or not to remove secrets from job message.
/// </summary>
[DataMember(EmitDefaultValue = false)]
public Boolean RestrictSecrets
{
get;
set;
}
/// <summary>
/// Gets or sets a value indicating what scope the system jwt token will have.
/// </summary>
[DataMember(EmitDefaultValue = false)]
public String SystemTokenScope
{
get;
set;
}
/// <summary>
/// Gets or sets value indicating any custom claims the system jwt token will have.
/// Created lazily; the backing field below is what gets serialized, and only
/// when it is non-null.
/// </summary>
public IDictionary<String, String> SystemTokenCustomClaims
{
get
{
if (m_systemTokenCustomClaims == null)
{
m_systemTokenCustomClaims = new Dictionary<String, String>();
}
return m_systemTokenCustomClaims;
}
}
/// <summary>
/// Gets or sets a value indicating what's the max number jobs we allow after expansion.
/// </summary>
[DataMember(EmitDefaultValue = false)]
public Int32? MaxJobExpansion
{
get;
set;
}
/// <summary>
/// Gets or sets a value indicating the max parallelism slots available to overwrite MaxConcurrency of test job slicing
/// </summary>
[DataMember(EmitDefaultValue = false)]
public Int32? MaxParallelism
{
get;
set;
}
/// <summary>
/// Gets or sets a value indicating if we should allow expressions to define secured resources.
/// </summary>
[DataMember(EmitDefaultValue = false)]
public Boolean EnableResourceExpressions
{
get;
set;
}
/// <summary>
/// Driven by FF: DistributedTask.LegalNodeNames
/// </summary>
[DataMember(EmitDefaultValue = false)]
public Boolean EnforceLegalNodeNames
{
get;
set;
}
/// <summary>
/// Allows hyphens in yaml names
/// </summary>
[DataMember(EmitDefaultValue = false)]
public Boolean AllowHyphenNames
{
get;
set;
}
[DataMember(Name = nameof(SystemTokenCustomClaims), EmitDefaultValue = false)]
private IDictionary<String, String> m_systemTokenCustomClaims;
}
}

View File

@@ -1,66 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using GitHub.DistributedTask.Pipelines.Runtime;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Represents the runtime values of a phase which has been expanded for execution.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public class ExpandPhaseResult
{
    private List<JobInstance> m_jobList;

    /// <summary>
    /// Initializes a new <c>ExpandPhaseResult</c> instance with a default maximum concurrency of 1.
    /// </summary>
    public ExpandPhaseResult()
    {
        this.MaxConcurrency = 1;
    }

    /// <summary>
    /// Gets or sets the execution behavior when an error is encountered.
    /// </summary>
    public Boolean ContinueOnError { get; set; }

    /// <summary>
    /// Gets or sets the execution behavior when an error is encountered.
    /// </summary>
    public Boolean FailFast { get; set; }

    /// <summary>
    /// Gets or sets the maximum concurrency for the jobs.
    /// </summary>
    public Int32 MaxConcurrency { get; set; }

    /// <summary>
    /// Gets the list of jobs for this phase; created lazily on first access.
    /// </summary>
    public IList<JobInstance> Jobs
    {
        get
        {
            if (m_jobList == null)
            {
                m_jobList = new List<JobInstance>();
            }
            return m_jobList;
        }
    }
}
}

View File

@@ -1,58 +0,0 @@
using System;
using System.ComponentModel;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Represents the result of an <c>ExpressionValue&lt;T&gt;</c> evaluation.
/// </summary>
/// <typeparam name="T">The type of the resolved value</typeparam>
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class ExpressionResult<T>
{
    /// <summary>
    /// Creates a result for a value that was resolved without accessing secrets.
    /// </summary>
    /// <param name="value">The resolved value</param>
    public ExpressionResult(T value)
        : this(value, false)
    {
    }

    /// <summary>
    /// Creates a result for a value, recording whether secrets were accessed
    /// while resolving it.
    /// </summary>
    /// <param name="value">The resolved value</param>
    /// <param name="containsSecrets">True if secrets were accessed while resolving the value; otherwise, false</param>
    public ExpressionResult(
        T value,
        Boolean containsSecrets)
    {
        this.Value = value;
        this.ContainsSecrets = containsSecrets;
    }

    /// <summary>
    /// Indicates whether secrets were accessed while resolving <see cref="Value"/>.
    /// </summary>
    [DataMember(EmitDefaultValue = false)]
    public Boolean ContainsSecrets { get; set; }

    /// <summary>
    /// The literal value result.
    /// </summary>
    [DataMember]
    public T Value { get; set; }
}
}

View File

@@ -146,26 +146,6 @@ namespace GitHub.DistributedTask.Pipelines
/// </summary>
internal Boolean IsLiteral => String.IsNullOrEmpty(m_expression);
/// <summary>
/// Retrieves the referenced value from the provided execution context.
/// </summary>
/// <param name="context">The execution context used for variable resolution</param>
/// <returns>The value of the variable if found; otherwise, null</returns>
public ExpressionResult<T> GetValue(IPipelineContext context = null)
{
// A literal needs no evaluation and can never touch secrets.
if (this.IsLiteral)
{
return new ExpressionResult<T>(m_literalValue, containsSecrets: false);
}
if (context != null)
{
return context.Evaluate<T>(m_expression);
}
// An expression cannot be resolved without a context.
return null;
}
/// <summary>
/// Converts the value to a string representation.
/// </summary>

View File

@@ -1,28 +0,0 @@
using System;
using System.ComponentModel;
using GitHub.DistributedTask.Expressions;
namespace GitHub.DistributedTask.Pipelines.Expressions
{
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class CounterNode : FunctionNode
{
    /// <summary>
    /// Evaluates counter(prefix?, seed?) by incrementing the named counter via
    /// the pipeline's counter store; returns the seed when no store is available.
    /// </summary>
    protected override Object EvaluateCore(EvaluationContext evaluationContext)
    {
        var counterPrefix = Parameters.Count > 0
            ? Parameters[0].EvaluateString(evaluationContext)
            : String.Empty;
        var counterSeed = Parameters.Count > 1
            ? Convert.ToInt32(Parameters[1].EvaluateNumber(evaluationContext))
            : 0;
        var pipelineContext = evaluationContext.State as IPipelineContext;
        return pipelineContext.CounterStore?.Increment(pipelineContext, counterPrefix, counterSeed) ?? counterSeed;
    }
}
}

View File

@@ -1,30 +0,0 @@
using System;
using System.ComponentModel;
using GitHub.DistributedTask.Expressions;
namespace GitHub.DistributedTask.Pipelines.Expressions
{
/// <summary>
/// Named values and functions registered for pipeline expressions.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public static class ExpressionConstants
{
/// <summary>
/// Gets the name of the variables node.
/// </summary>
public static readonly String Variables = "variables";
/// <summary>
/// Gets the pipeline context available in pipeline expressions.
/// </summary>
public static readonly INamedValueInfo PipelineNamedValue = new NamedValueInfo<PipelineContextNode>("pipeline");
/// <summary>
/// Gets the variable context available in pipeline expressions.
/// </summary>
public static readonly INamedValueInfo VariablesNamedValue = new NamedValueInfo<VariablesContextNode>("variables");
/// <summary>
/// Gets the counter function available in pipeline expressions.
/// Accepts 0-2 arguments (optional prefix and seed; see CounterNode).
/// </summary>
public static readonly IFunctionInfo CounterFunction = new FunctionInfo<CounterNode>("counter", 0, 2);
}
}

View File

@@ -1,32 +0,0 @@
using System;
using GitHub.DistributedTask.Expressions;
namespace GitHub.DistributedTask.Pipelines.Expressions
{
// Function and named-value registrations available in input-validation expressions.
internal static class InputValidationConstants
{
public static readonly String IsEmail = "isEmail";
public static readonly String IsInRange = "isInRange";
public static readonly String IsIPv4Address = "isIPv4Address";
public static readonly String IsSha1 = "isSha1";
public static readonly String IsUrl = "isUrl";
public static readonly String IsMatch = "isMatch";
public static readonly String Length = "length";
// Each entry binds an expression function name to its node type and arity.
public static readonly IFunctionInfo[] Functions = new IFunctionInfo[]
{
new FunctionInfo<IsEmailNode>(InputValidationConstants.IsEmail, IsEmailNode.minParameters, IsEmailNode.maxParameters),
new FunctionInfo<IsInRangeNode>(InputValidationConstants.IsInRange, IsInRangeNode.minParameters, IsInRangeNode.maxParameters),
new FunctionInfo<IsIPv4AddressNode>(InputValidationConstants.IsIPv4Address, IsIPv4AddressNode.minParameters, IsIPv4AddressNode.maxParameters),
new FunctionInfo<IsMatchNode>(InputValidationConstants.IsMatch, IsMatchNode.minParameters, IsMatchNode.maxParameters),
new FunctionInfo<IsSHA1Node>(InputValidationConstants.IsSha1, IsSHA1Node.minParameters, IsSHA1Node.maxParameters),
new FunctionInfo<IsUrlNode>(InputValidationConstants.IsUrl, IsUrlNode.minParameters, IsUrlNode.maxParameters),
new FunctionInfo<LengthNode>(InputValidationConstants.Length, LengthNode.minParameters, LengthNode.maxParameters),
};
// "value" resolves to the input currently under validation (see InputValueNode).
public static readonly INamedValueInfo[] NamedValues = new INamedValueInfo[]
{
new NamedValueInfo<InputValueNode>("value"),
};
}
}

View File

@@ -1,15 +0,0 @@
using System;
using GitHub.DistributedTask.Expressions;
using GitHub.DistributedTask.Pipelines.Validation;
namespace GitHub.DistributedTask.Pipelines.Expressions
{
// Named value "value": resolves to the raw input currently being validated.
internal class InputValueNode : NamedValueNode
{
protected sealed override Object EvaluateCore(EvaluationContext evaluationContext)
{
// State is assumed to be an InputValidationContext; a different state type
// makes the `as` cast yield null and the dereference below throw
// NullReferenceException. NOTE(review): confirm callers always supply it.
var validationContext = evaluationContext.State as InputValidationContext;
return validationContext.Value;
}
}
}

View File

@@ -1,22 +0,0 @@
using System;
using System.ComponentModel;
using GitHub.DistributedTask.Expressions;
namespace GitHub.DistributedTask.Pipelines.Expressions
{
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class IsEmailNode : FunctionNode
{
    public static Int32 minParameters = 1;
    public static Int32 maxParameters = 1;

    protected sealed override Boolean TraceFullyRealized => false;

    /// <summary>
    /// Evaluates isEmail(value): true when the value matches the well-known
    /// email pattern. A null value is treated as the empty string.
    /// </summary>
    protected sealed override Object EvaluateCore(EvaluationContext context)
    {
        var candidate = Parameters[0].EvaluateString(context) ?? String.Empty;
        return RegexUtility.IsMatch(candidate, WellKnownRegularExpressions.Email);
    }
}
}

View File

@@ -1,22 +0,0 @@
using System;
using System.ComponentModel;
using GitHub.DistributedTask.Expressions;
namespace GitHub.DistributedTask.Pipelines.Expressions
{
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class IsIPv4AddressNode : FunctionNode
{
    public static Int32 minParameters = 1;
    public static Int32 maxParameters = 1;

    protected sealed override Boolean TraceFullyRealized => false;

    /// <summary>
    /// Evaluates isIPv4Address(value): true when the value matches the
    /// well-known IPv4 address pattern. A null value is treated as empty.
    /// </summary>
    protected sealed override Object EvaluateCore(EvaluationContext context)
    {
        var candidate = Parameters[0].EvaluateString(context) ?? String.Empty;
        return RegexUtility.IsMatch(candidate, WellKnownRegularExpressions.IPv4Address);
    }
}
}

View File

@@ -1,24 +0,0 @@
using System;
using System.ComponentModel;
using GitHub.DistributedTask.Expressions;
namespace GitHub.DistributedTask.Pipelines.Expressions
{
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class IsInRangeNode : FunctionNode
{
    public static Int32 minParameters = 3;
    public static Int32 maxParameters = 3;

    protected sealed override Boolean TraceFullyRealized => false;

    /// <summary>
    /// Evaluates isInRange(value, min, max): true when min &lt;= value &lt;= max
    /// (inclusive on both ends).
    /// </summary>
    protected sealed override Object EvaluateCore(EvaluationContext context)
    {
        decimal candidate = Parameters[0].EvaluateNumber(context);
        decimal lowerBound = Parameters[1].EvaluateNumber(context);
        decimal upperBound = Parameters[2].EvaluateNumber(context);
        return lowerBound <= candidate && candidate <= upperBound;
    }
}
}

View File

@@ -1,30 +0,0 @@
using System;
using System.ComponentModel;
using GitHub.DistributedTask.Expressions;
namespace GitHub.DistributedTask.Pipelines.Expressions
{
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class IsMatchNode : FunctionNode
{
    public static Int32 minParameters = 2;
    public static Int32 maxParameters = 3;

    protected sealed override Boolean TraceFullyRealized => false;

    /// <summary>
    /// Evaluates isMatch(value, regEx, options?): true when the value matches
    /// the pattern, honoring the optional regex-options string.
    /// </summary>
    protected sealed override Object EvaluateCore(EvaluationContext context)
    {
        var input = Parameters[0].EvaluateString(context) ?? String.Empty;
        var pattern = Parameters[1].EvaluateString(context) ?? String.Empty;
        var optionsText = Parameters.Count == 3
            ? (Parameters[2].EvaluateString(context) ?? String.Empty)
            : String.Empty;
        return RegexUtility.IsMatch(input, pattern, optionsText);
    }
}
}

View File

@@ -1,22 +0,0 @@
using System;
using System.ComponentModel;
using GitHub.DistributedTask.Expressions;
namespace GitHub.DistributedTask.Pipelines.Expressions
{
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class IsSHA1Node : FunctionNode
{
    protected sealed override Boolean TraceFullyRealized => false;

    public static Int32 minParameters = 1;
    public static Int32 maxParameters = 1;

    /// <summary>
    /// isSha1(value: string) -- true when the argument matches the well-known SHA-1
    /// pattern; a null evaluation result is treated as the empty string.
    /// </summary>
    protected sealed override Object EvaluateCore(EvaluationContext context)
    {
        var candidate = Parameters[0].EvaluateString(context) ?? String.Empty;
        return RegexUtility.IsMatch(candidate, WellKnownRegularExpressions.SHA1);
    }
}
}

View File

@@ -1,22 +0,0 @@
using System;
using System.ComponentModel;
using GitHub.DistributedTask.Expressions;
namespace GitHub.DistributedTask.Pipelines.Expressions
{
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class IsUrlNode : FunctionNode
{
    protected sealed override Boolean TraceFullyRealized => false;

    public static Int32 minParameters = 1;
    public static Int32 maxParameters = 1;

    /// <summary>
    /// isUrl(value: string) -- true when the argument matches the well-known URL
    /// pattern; a null evaluation result is treated as the empty string.
    /// </summary>
    protected sealed override Object EvaluateCore(EvaluationContext context)
    {
        var candidate = Parameters[0].EvaluateString(context) ?? String.Empty;
        return RegexUtility.IsMatch(candidate, WellKnownRegularExpressions.Url);
    }
}
}

View File

@@ -1,63 +0,0 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using GitHub.DistributedTask.Expressions;
using Newtonsoft.Json.Linq;
namespace GitHub.DistributedTask.Pipelines.Expressions
{
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class LengthNode : FunctionNode
{
    protected sealed override Boolean TraceFullyRealized => false;

    public static Int32 minParameters = 1;
    public static Int32 maxParameters = 1;

    /// <summary>
    /// Length(value: object) -- returns the element count of an array, dictionary, or
    /// collection, or the character count of a string, as a Decimal. Scalar kinds
    /// (boolean, null, number, version) and non-collection objects are rejected with
    /// <see cref="NotSupportedException"/>.
    /// </summary>
    protected sealed override Object EvaluateCore(EvaluationContext context)
    {
        var evaluated = Parameters[0].Evaluate(context);
        var supported = true;
        Int32 length = -1;

        switch (evaluated.Kind)
        {
            case ValueKind.Array:
                length = ((JArray)evaluated.Value).Count;
                break;

            case ValueKind.String:
                length = ((String)evaluated.Value).Length;
                break;

            case ValueKind.Object:
                // Prefer the read-only dictionary view; fall back to any ICollection.
                if (evaluated.Value is IReadOnlyDictionary<String, Object> dictionary)
                {
                    length = dictionary.Count;
                }
                else if (evaluated.Value is ICollection collection)
                {
                    length = collection.Count;
                }
                else
                {
                    supported = false;
                }
                break;

            case ValueKind.Boolean:
            case ValueKind.Null:
            case ValueKind.Number:
            case ValueKind.Version:
                supported = false;
                break;
        }

        if (!supported)
        {
            throw new NotSupportedException(PipelineStrings.InvalidTypeForLengthFunction(evaluated.Kind));
        }

        return new Decimal(length);
    }
}
}

View File

@@ -1,32 +0,0 @@
using System;
using System.ComponentModel;
using System.Collections.Generic;
using GitHub.DistributedTask.Expressions;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines.Expressions
{
[EditorBrowsable(EditorBrowsableState.Never)]
internal sealed class PipelineContextNode : NamedValueNode
{
    /// <summary>
    /// Materializes the "pipeline" named value: a case-insensitive dictionary which
    /// currently exposes only "startTime", sourced from the well-known
    /// PipelineStartTime variable when present and convertible to a date/time.
    /// </summary>
    protected override Object EvaluateCore(EvaluationContext context)
    {
        var pipelineContext = context.State as IPipelineContext;
        var data = new Dictionary<String, Object>(StringComparer.OrdinalIgnoreCase);

        if (pipelineContext.Variables.TryGetValue(WellKnownDistributedTaskVariables.PipelineStartTime, out VariableValue startTimeVariable) &&
            !String.IsNullOrEmpty(startTimeVariable.Value))
        {
            // Reuse the expression SDK's conversion rules instead of parsing by hand.
            var intermediate = EvaluationResult.CreateIntermediateResult(context, startTimeVariable.Value, out _);
            if (intermediate.TryConvertToDateTime(context, out DateTimeOffset startTime))
            {
                data["startTime"] = startTime;
            }
        }

        return data;
    }
}
}

View File

@@ -1,16 +0,0 @@
using System;
using System.ComponentModel;
using GitHub.DistributedTask.Expressions;
namespace GitHub.DistributedTask.Pipelines.Expressions
{
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class VariablesContextNode : NamedValueNode
{
    /// <summary>
    /// Resolves the "variables" named value by returning the variable dictionary of
    /// the pipeline context carried in the evaluation state.
    /// </summary>
    protected override Object EvaluateCore(EvaluationContext context)
    {
        var pipelineContext = context.State as IPipelineContext;
        return pipelineContext.Variables;
    }
}
}

View File

@@ -1,247 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Text;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.Expressions;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.Pipelines.Runtime;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Base class for a parsed graph-node condition expression. The condition text is
/// parsed once in the constructor against the "github"/"needs" named values and the
/// always/failure/cancelled/success status functions defined below.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public abstract class GraphCondition<TInstance> where TInstance : IGraphNodeInstance
{
/// <summary>
/// Parses the supplied condition, falling back to <see cref="Default"/> when the
/// condition is null or empty.
/// </summary>
private protected GraphCondition(String condition)
{
m_condition = !String.IsNullOrEmpty(condition) ? condition : Default;
m_parser = new ExpressionParser();
m_parsedCondition = m_parser.CreateTree(m_condition, new ConditionTraceWriter(), s_namedValueInfo, FunctionInfo);
}
/// <summary>
/// Gets the default condition if none is specified
/// </summary>
public static String Default
{
get
{
return $"{PipelineTemplateConstants.Success}()";
}
}
/// <summary>
/// Gets a value indicating whether the event payload is used within the condition
/// </summary>
public Boolean RequiresEventPayload
{
get
{
CheckRequiredProperties();
return m_requiresEventPayload.Value;
}
}
/// <summary>
/// Gets a value indicating whether dependency outputs are used within the condition
/// </summary>
public Boolean RequiresOutputs
{
get
{
CheckRequiredProperties();
return m_requiresOutputs.Value;
}
}
/// <summary>
/// Gets a value indicating whether variables are used within the condition
/// </summary>
public Boolean RequiresVariables
{
get
{
// Currently hard-coded: conditions never reference variables.
return false;
}
}
// Computes (and caches into the nullable fields) whether the parsed condition
// references the event payload and/or dependency outputs contexts.
private void CheckRequiredProperties()
{
var matches = m_parsedCondition.CheckReferencesContext(PipelineTemplateConstants.EventPattern, PipelineTemplateConstants.OutputsPattern);
m_requiresEventPayload = matches[0];
m_requiresOutputs = matches[1];
}
// Yields the "needs" context entries referenced by the function parameters; with no
// parameters, yields every entry. A parameter that names a missing or non-primitive
// dependency yields null so callers can treat it as unresolved.
private static IEnumerable<DictionaryContextData> GetNeeds(
IReadOnlyList<ExpressionNode> parameters,
EvaluationContext context,
GraphExecutionContext<TInstance> expressionContext)
{
if (expressionContext.Data.TryGetValue(PipelineTemplateConstants.Needs, out var needsData) &&
needsData is DictionaryContextData needs)
{
if (parameters.Count == 0)
{
foreach (var pair in needs)
{
yield return pair.Value as DictionaryContextData;
}
}
else
{
foreach (var parameter in parameters)
{
var parameterResult = parameter.Evaluate(context);
var dependencyName = default(String);
if (parameterResult.IsPrimitive)
{
dependencyName = parameterResult.ConvertToString();
}
if (!String.IsNullOrEmpty(dependencyName) &&
needs.TryGetValue(dependencyName, out var need))
{
yield return need as DictionaryContextData;
}
else
{
yield return default;
}
}
}
}
}
// Original condition text (or Default) and the parser/tree built from it.
private readonly String m_condition;
private readonly ExpressionParser m_parser;
// Lazily-computed caches populated by CheckRequiredProperties.
private Boolean? m_requiresEventPayload;
private Boolean? m_requiresOutputs;
protected readonly IExpressionNode m_parsedCondition;
// Named values available to condition expressions: "github" and "needs".
private static readonly INamedValueInfo[] s_namedValueInfo = new INamedValueInfo[]
{
new NamedValueInfo<GraphConditionNamedValue<TInstance>>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<GraphConditionNamedValue<TInstance>>(PipelineTemplateConstants.Needs),
};
// Status functions available to condition expressions; failure()/success() accept
// any number of dependency-name arguments.
public static readonly IFunctionInfo[] FunctionInfo = new IFunctionInfo[]
{
new FunctionInfo<AlwaysFunction>(PipelineTemplateConstants.Always, 0, 0),
new FunctionInfo<FailureFunction>(PipelineTemplateConstants.Failure, 0, Int32.MaxValue),
new FunctionInfo<CancelledFunction>(PipelineTemplateConstants.Cancelled, 0, 0),
new FunctionInfo<SuccessFunction>(PipelineTemplateConstants.Success, 0, Int32.MaxValue),
};
// Collects Info-level parser trace output; Verbose messages are discarded.
protected sealed class ConditionTraceWriter : ITraceWriter
{
public String Trace
{
get
{
return m_info.ToString();
}
}
public void Info(String message)
{
m_info.AppendLine(message);
}
public void Verbose(String message)
{
// Not interested
}
private StringBuilder m_info = new StringBuilder();
}
// always() -- unconditionally true.
private sealed class AlwaysFunction : Function
{
protected override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
{
resultMemory = null;
return true;
}
}
// cancelled() -- true only while the pipeline is canceling.
private sealed class CancelledFunction : Function
{
protected override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
{
resultMemory = null;
var conditionContext = context.State as GraphExecutionContext<TInstance>;
return conditionContext.State == PipelineState.Canceling;
}
}
// failure([deps...]) -- true when the pipeline is in progress and at least one of
// the referenced dependencies reported a "failure" result; false immediately if any
// referenced dependency is missing or has no readable result.
private sealed class FailureFunction : Function
{
protected override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
{
resultMemory = null;
var conditionContext = context.State as GraphExecutionContext<TInstance>;
if (conditionContext.State != PipelineState.InProgress)
{
return false;
}
Boolean anyFailed = false;
foreach (var need in GetNeeds(Parameters, context, conditionContext))
{
if (need == null ||
!need.TryGetValue(PipelineTemplateConstants.Result, out var resultData) ||
!(resultData is StringContextData resultString))
{
return false;
}
if (String.Equals(resultString, PipelineTemplateConstants.Failure, StringComparison.OrdinalIgnoreCase))
{
anyFailed = true;
break;
}
}
return anyFailed;
}
}
// success([deps...]) -- true when the pipeline is in progress and every referenced
// dependency reported a "success" result.
private sealed class SuccessFunction : Function
{
protected override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
{
resultMemory = null;
var conditionContext = context.State as GraphExecutionContext<TInstance>;
if (conditionContext.State != PipelineState.InProgress)
{
return false;
}
Boolean allSucceeded = true;
foreach (var need in GetNeeds(Parameters, context, conditionContext))
{
if (!allSucceeded ||
need == null ||
!need.TryGetValue(PipelineTemplateConstants.Result, out var resultData) ||
!(resultData is StringContextData resultString) ||
!String.Equals(resultString, PipelineTemplateConstants.Success, StringComparison.OrdinalIgnoreCase))
{
return false;
}
}
return true;
}
}
}
}

View File

@@ -1,86 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// A job step which groups an ordered list of task steps and exposes a shared
/// set of named outputs.
/// </summary>
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public class GroupStep : JobStep
{
[JsonConstructor]
public GroupStep()
{
}
// Copy constructor used by Clone: deep-copies the contained steps and the
// outputs dictionary (case-insensitive keys).
private GroupStep(GroupStep groupStepToClone)
: base(groupStepToClone)
{
if (groupStepToClone.m_steps?.Count > 0)
{
foreach (var step in groupStepToClone.m_steps)
{
this.Steps.Add(step.Clone() as TaskStep);
}
}
if (groupStepToClone.m_outputs?.Count > 0)
{
this.m_outputs = new Dictionary<String, String>(groupStepToClone.m_outputs, StringComparer.OrdinalIgnoreCase);
}
}
public override StepType Type => StepType.Group;
/// <summary>
/// Gets the task steps contained in this group; the backing list is created lazily.
/// </summary>
public IList<TaskStep> Steps
{
get
{
if (m_steps == null)
{
m_steps = new List<TaskStep>();
}
return m_steps;
}
}
/// <summary>
/// Gets the named outputs of the group (case-insensitive keys); created lazily.
/// </summary>
public IDictionary<String, String> Outputs
{
get
{
if (m_outputs == null)
{
m_outputs = new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase);
}
return m_outputs;
}
}
/// <summary>
/// Creates a deep copy of this group step.
/// </summary>
public override Step Clone()
{
return new GroupStep(this);
}
// Drop empty collections before serialization so they are omitted entirely
// (EmitDefaultValue = false only skips nulls, not empty instances).
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_steps?.Count == 0)
{
m_steps = null;
}
if (m_outputs?.Count == 0)
{
m_outputs = null;
}
}
[DataMember(Name = "Steps", EmitDefaultValue = false)]
private IList<TaskStep> m_steps;
[DataMember(Name = "Outputs", EmitDefaultValue = false)]
private IDictionary<String, String> m_outputs;
}
}

View File

@@ -1,37 +0,0 @@
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides a mechanism of resolving an <c>AgentPoolReference</c> to a <c>TaskAgentPool</c>.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IAgentPoolResolver
{
/// <summary>
/// Attempts to resolve the agent pool references to <c>TaskAgentPool</c> instances.
/// </summary>
/// <param name="references">The agent pools which should be resolved</param>
/// <returns>A list containing the resolved agent pools</returns>
IList<TaskAgentPool> Resolve(ICollection<AgentPoolReference> references);
}
/// <summary>
/// Convenience extensions for <see cref="IAgentPoolResolver"/>.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public static class IAgentPoolResolverExtensions
{
/// <summary>
/// Attempts to resolve the agent pool reference to a <c>TaskAgentPool</c>.
/// </summary>
/// <param name="resolver">The resolver which performs the lookup</param>
/// <param name="reference">The agent pool which should be resolved</param>
/// <returns>The agent pool if resolved; otherwise, null</returns>
public static TaskAgentPool Resolve(
this IAgentPoolResolver resolver,
AgentPoolReference reference)
{
return resolver.Resolve(new[] { reference }).FirstOrDefault();
}
}
}

View File

@@ -1,27 +0,0 @@
using System.Collections.Generic;
using System.ComponentModel;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Stores agent pool references and tracks which of them are authorized for use.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IAgentPoolStore
{
/// <summary>
/// Adds a reference which should be considered authorized. Future
/// calls to retrieve this resource will be treated as pre-authorized regardless
/// of authorization context used.
/// </summary>
/// <param name="pools">The pools which should be authorized</param>
void Authorize(IList<AgentPoolReference> pools);
/// <summary>
/// Gets the references which have been authorized in this store.
/// </summary>
IList<AgentPoolReference> GetAuthorizedReferences();
/// <summary>
/// Gets the pool matching the supplied reference, if available.
/// </summary>
TaskAgentPool Get(AgentPoolReference reference);
/// <summary>
/// Gets the <c>IAgentPoolResolver</c> used by this store, if any.
/// </summary>
IAgentPoolResolver Resolver { get; }
}
}

View File

@@ -1,37 +0,0 @@
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides a mechanism of resolving an <c>AgentQueueReference</c> to a <c>TaskAgentQueue</c>.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IAgentQueueResolver
{
/// <summary>
/// Attempts to resolve the agent queue references to <c>TaskAgentQueue</c> instances.
/// </summary>
/// <param name="references">The agent queues which should be resolved</param>
/// <returns>A list containing the resolved agent queues</returns>
IList<TaskAgentQueue> Resolve(ICollection<AgentQueueReference> references);
}
/// <summary>
/// Convenience extensions for <see cref="IAgentQueueResolver"/>.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public static class IAgentQueueResolverExtensions
{
/// <summary>
/// Attempts to resolve the agent queue reference to a <c>TaskAgentQueue</c>.
/// </summary>
/// <param name="resolver">The resolver which performs the lookup</param>
/// <param name="reference">The agent queue which should be resolved</param>
/// <returns>The agent queue if resolved; otherwise, null</returns>
public static TaskAgentQueue Resolve(
this IAgentQueueResolver resolver,
AgentQueueReference reference)
{
return resolver.Resolve(new[] { reference }).FirstOrDefault();
}
}
}

View File

@@ -1,27 +0,0 @@
using System.Collections.Generic;
using System.ComponentModel;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Stores agent queues and tracks which of them are authorized for use.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IAgentQueueStore
{
/// <summary>
/// Adds a reference which should be considered authorized. Future
/// calls to retrieve this resource will be treated as pre-authorized regardless
/// of authorization context used.
/// </summary>
/// <param name="queues">The queues which should be authorized</param>
void Authorize(IList<TaskAgentQueue> queues);
/// <summary>
/// Gets the references which have been authorized in this store.
/// </summary>
IList<AgentQueueReference> GetAuthorizedReferences();
/// <summary>
/// Gets the queue matching the supplied reference, if available.
/// </summary>
TaskAgentQueue Get(AgentQueueReference reference);
/// <summary>
/// Gets the <c>IAgentQueueResolver</c> used by this store, if any.
/// </summary>
IAgentQueueResolver Resolver { get; }
}
}

View File

@@ -1,11 +0,0 @@
using System;
using System.ComponentModel;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides a mechanism of incrementing a named counter within a pipeline context.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface ICounterResolver
{
/// <summary>
/// Increments the counter with the given prefix, seeding it when absent, and
/// returns the incremented value.
/// </summary>
Int32 Increment(IPipelineContext context, String prefix, Int32 seed);
}
}

View File

@@ -1,24 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Stores named counters allocated during pipeline execution.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface ICounterStore
{
/// <summary>
/// Gets the counters which are allocated for this store.
/// </summary>
IReadOnlyDictionary<String, Int32> Counters { get; }
/// <summary>
/// Increments the counter with the given prefix. If no such counter exists, a new one will be created with
/// <paramref name="seed"/> as the initial value.
/// </summary>
/// <param name="context">The pipeline context in which the counter is incremented</param>
/// <param name="prefix">The counter prefix</param>
/// <param name="seed">The initial value for the counter if the counter does not exist</param>
/// <returns>The incremented value</returns>
Int32 Increment(IPipelineContext context, String prefix, Int32 seed);
}
}

View File

@@ -1,14 +0,0 @@
using System;
using System.ComponentModel;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides a mechanism of resolving an environment by name or by id.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IEnvironmentResolver
{
/// <summary>
/// Resolves the environment with the given name.
/// </summary>
EnvironmentInstance Resolve(String environmentName);
/// <summary>
/// Resolves the environment with the given id.
/// </summary>
EnvironmentInstance Resolve(Int32 environmentId);
}
}

View File

@@ -1,22 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides a contract for resolving environments from a given store.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IEnvironmentStore
{
/// <summary>
/// Resolves the environment with the given name.
/// </summary>
EnvironmentInstance ResolveEnvironment(String environmentName);
/// <summary>
/// Resolves the environment with the given id.
/// </summary>
EnvironmentInstance ResolveEnvironment(Int32 environmentId);
/// <summary>
/// Gets the environment matching the supplied reference, if available.
/// </summary>
EnvironmentInstance Get(EnvironmentReference reference);
/// <summary>
/// Gets the environment references known to this store.
/// </summary>
IList<EnvironmentReference> GetReferences();
}
}

View File

@@ -1,51 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using GitHub.DistributedTask.Pipelines.Validation;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// A node in the pipeline execution graph: a named, optionally conditional unit
/// with dependencies on other nodes.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IGraphNode
{
/// <summary>
/// Gets or sets the node name used for graph references.
/// </summary>
String Name
{
get;
set;
}
/// <summary>
/// Gets or sets the display name shown for the node.
/// </summary>
String DisplayName
{
get;
set;
}
/// <summary>
/// Gets or sets the condition expression which gates execution of the node.
/// </summary>
String Condition
{
get;
set;
}
/// <summary>
/// Gets the names of the nodes this node depends on.
/// </summary>
ISet<String> DependsOn
{
get;
}
/// <summary>
/// Validates the node within the given build context, recording errors on the result.
/// </summary>
void Validate(PipelineBuildContext context, ValidationResult result);
}
/// <summary>
/// Runtime state for a single attempt of a graph node.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IGraphNodeInstance
{
Int32 Attempt { get; set; }
String Identifier { get; set; }
String Name { get; set; }
DateTime? StartTime { get; set; }
DateTime? FinishTime { get; set; }
TaskResult? Result { get; set; }
// True once the instance has read any secret value; cleared via ResetSecretsAccessed.
Boolean SecretsAccessed { get; }
IDictionary<String, VariableValue> Outputs { get; }
void ResetSecretsAccessed();
}
}

View File

@@ -1,20 +0,0 @@
using System;
using System.Collections.Generic;
using GitHub.DistributedTask.Pipelines.Runtime;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Creates concrete jobs for a named phase from an execution context.
/// </summary>
internal interface IJobFactory
{
/// <summary>
/// Gets the name of the phase for which jobs are produced.
/// </summary>
String Name { get; }
/// <summary>
/// Creates a job with the supplied container configuration, error/timeout policy,
/// and optional display name.
/// </summary>
Job CreateJob(
JobExecutionContext context,
ExpressionValue<String> container,
IDictionary<String, ExpressionValue<String>> sidecarContainers,
Boolean continueOnError,
Int32 timeoutInMinutes,
Int32 cancelTimeoutInMinutes,
String displayName = null);
}
}

View File

@@ -1,12 +0,0 @@
using System;
using System.ComponentModel;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides access to agent package metadata.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IPackageStore
{
/// <summary>
/// Gets the latest available version for the given package type.
/// </summary>
PackageVersion GetLatestVersion(String packageType);
}
}

View File

@@ -1,20 +0,0 @@
using System;
using GitHub.DistributedTask.Pipelines.Validation;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// This is a temporary extension point for a provider phase to participate in pipeline resource discovery.
/// This extension point can be removed after we have the schema-driven resource discovery.
/// </summary>
public interface IPhaseProvider
{
/// <summary>
/// Gets the provider name this implementation handles.
/// </summary>
String Provider { get; }
/// <summary>
/// Validate pipeline with builder context to provide additional validation errors
/// and pipeline resource discovery.
/// </summary>
ValidationResult Validate(PipelineBuildContext context, ProviderPhase phase);
}
}

View File

@@ -1,59 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Logging;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using Newtonsoft.Json.Linq;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides the environment and services available during build and execution of a pipeline.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IPipelineContext
{
// Stores and services available to the pipeline.
ICounterStore CounterStore { get; }
DictionaryContextData Data { get; }
Int32 EnvironmentVersion { get; }
EvaluationOptions ExpressionOptions { get; }
IPipelineIdGenerator IdGenerator { get; }
IPackageStore PackageStore { get; }
PipelineResources ReferencedResources { get; }
IResourceStore ResourceStore { get; }
IReadOnlyList<IStepProvider> StepProviders { get; }
ISecretMasker SecretMasker { get; }
ITaskStore TaskStore { get; }
IPipelineTraceWriter Trace { get; }
// Variable state: system-owned names plus the full variable dictionary.
ISet<String> SystemVariableNames { get; }
IDictionary<String, VariableValue> Variables { get; }
/// <summary>
/// Expands variable references in the value, optionally masking secret values.
/// </summary>
String ExpandVariables(String value, Boolean maskSecrets = false);
/// <summary>
/// Evaluates an expression string to a typed result within this context.
/// </summary>
ExpressionResult<T> Evaluate<T>(String expression);
/// <summary>
/// Evaluates the expressions embedded in a JSON object within this context.
/// </summary>
ExpressionResult<JObject> Evaluate(JObject value);
}
/// <summary>
/// A trace writer which additionally tracks entry and exit of named properties.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IPipelineTraceWriter : ITraceWriter
{
void EnterProperty(String name);
void LeaveProperty(String name);
}
}

View File

@@ -1,81 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using GitHub.Services.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
[EditorBrowsable(EditorBrowsableState.Never)]
public static class IPipelineContextExtensions
{
    /// <summary>
    /// Uses the current context to validate the steps provided.
    /// </summary>
    /// <param name="context">The current pipeline context</param>
    /// <param name="steps">The list of steps which should be validated</param>
    /// <param name="target">The target against which the steps are validated</param>
    /// <param name="options">The options controlling the level of validation performed</param>
    /// <returns>A list of validation errors which were encountered, if any</returns>
    public static IList<PipelineValidationError> Validate(
        this IPipelineContext context,
        IList<Step> steps,
        PhaseTarget target,
        BuildOptions options)
    {
        var builder = new PipelineBuilder(context);
        return builder.Validate(steps, target, options);
    }

    /// <summary>
    /// Evaluates a property which is specified as an expression and writes the resulting value to the
    /// corresponding trace log if one is specified on the context.
    /// </summary>
    /// <typeparam name="T">The result type of the expression</typeparam>
    /// <param name="context">The pipeline context</param>
    /// <param name="name">The name of the property being evaluated</param>
    /// <param name="expression">The expression which should be evaluated</param>
    /// <param name="defaultValue">The default value if no expression is specified</param>
    /// <param name="traceDefault">True to write the default value if no expression is specified; otherwise, false</param>
    /// <returns>The result of the expression evaluation</returns>
    internal static ExpressionResult<T> Evaluate<T>(
        this IPipelineContext context,
        String name,
        ExpressionValue<T> expression,
        T defaultValue,
        Boolean traceDefault = true)
    {
        ExpressionResult<T> result = null;
        if (expression != null)
        {
            if (expression.IsLiteral)
            {
                context.Trace?.Info($"{name}: {GetTraceValue(expression.Literal)}");
                result = new ExpressionResult<T>(expression.Literal);
            }
            else
            {
                // Bracket the nested evaluation so trace output is grouped under the property.
                context.Trace?.EnterProperty(name);
                result = expression.GetValue(context);
                context.Trace?.LeaveProperty(name);
            }
        }
        else if (traceDefault && context.Trace != null)
        {
            context.Trace.Info($"{name}: {defaultValue}");
        }
        return result ?? new ExpressionResult<T>(defaultValue);
    }

    /// <summary>
    /// Formats a value for trace output: value types inline, reference types as
    /// indented JSON on a new line.
    /// </summary>
    private static String GetTraceValue<T>(T value)
    {
        // BUGFIX: a null literal previously threw NullReferenceException via
        // value.GetType(); render nulls explicitly instead.
        if (value == null)
        {
            return "(null)";
        }

        if (value.GetType().IsValueType)
        {
            return value.ToString();
        }

        return $"{System.Environment.NewLine}{JsonUtility.ToString(value, indent: true)}";
    }
}
}

View File

@@ -1,35 +0,0 @@
using System;
using System.ComponentModel;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Generates deterministic identifiers and instance names for stages, phases,
/// jobs, and tasks from their position in the pipeline graph and attempt number.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IPipelineIdGenerator
{
Guid GetInstanceId(params String[] segments);
String GetInstanceName(params String[] segments);
String GetStageIdentifier(String stageName);
Guid GetStageInstanceId(String stageName, Int32 attempt);
String GetStageInstanceName(String stageName, Int32 attempt);
String GetPhaseIdentifier(String stageName, String phaseName);
Guid GetPhaseInstanceId(String stageName, String phaseName, Int32 attempt);
String GetPhaseInstanceName(String stageName, String phaseName, Int32 attempt);
String GetJobIdentifier(String stageName, String phaseName, String jobName);
Guid GetJobInstanceId(String stageName, String phaseName, String jobName, Int32 attempt);
String GetJobInstanceName(String stageName, String phaseName, String jobName, Int32 attempt);
// NOTE(review): "name3" presumably denotes the task name, mirroring "name" on the
// method below -- confirm against implementations before renaming.
Guid GetTaskInstanceId(String stageName, String phaseName, String jobName, Int32 jobAttempt, String name3);
String GetTaskInstanceName(String stageName, String phaseName, String jobName, Int32 jobAttempt, String name);
}
}

View File

@@ -1,87 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using GitHub.DistributedTask.Pipelines.Artifacts;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Stores build resources, keyed by alias, and contributes steps for them.
/// </summary>
public interface IBuildStore : IStepProvider
{
void Add(BuildResource resource);
void Add(IEnumerable<BuildResource> resources);
BuildResource Get(String alias);
IEnumerable<BuildResource> GetAll();
// Resolver used to materialize artifacts for stored builds, if any.
IArtifactResolver Resolver { get; }
}
/// <summary>
/// Stores container resources, keyed by alias.
/// </summary>
public interface IContainerStore
{
void Add(ContainerResource resource);
void Add(IEnumerable<ContainerResource> resources);
ContainerResource Get(String alias);
IEnumerable<ContainerResource> GetAll();
}
/// <summary>
/// Stores pipeline resources, keyed by alias, and contributes steps for them.
/// </summary>
public interface IPipelineStore : IStepProvider
{
void Add(PipelineResource resource);
void Add(IEnumerable<PipelineResource> resources);
PipelineResource Get(String alias);
IEnumerable<PipelineResource> GetAll();
}
/// <summary>
/// Stores repository resources, keyed by alias, and contributes steps for them.
/// </summary>
public interface IRepositoryStore : IStepProvider
{
void Add(RepositoryResource resource);
void Add(IEnumerable<RepositoryResource> resources);
RepositoryResource Get(String alias);
IEnumerable<RepositoryResource> GetAll();
}
/// <summary>
/// Aggregates the individual resource stores used while building and running a
/// pipeline, with convenience lookups by id or name.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IResourceStore : IStepProvider
{
IBuildStore Builds { get; }
IContainerStore Containers { get; }
IServiceEndpointStore Endpoints { get; }
ISecureFileStore Files { get; }
IEnvironmentStore Environments { get; }
IPipelineStore Pipelines { get; }
IAgentQueueStore Queues { get; }
IAgentPoolStore Pools { get; }
IRepositoryStore Repositories { get; }
IVariableGroupStore VariableGroups { get; }
PipelineResources GetAuthorizedResources();
ServiceEndpoint GetEndpoint(Guid endpointId);
ServiceEndpoint GetEndpoint(String endpointId);
SecureFile GetFile(Guid fileId);
SecureFile GetFile(String fileId);
TaskAgentQueue GetQueue(Int32 queueId);
TaskAgentQueue GetQueue(String queueId);
TaskAgentPool GetPool(Int32 poolId);
TaskAgentPool GetPool(String poolName);
VariableGroup GetVariableGroup(Int32 groupId);
VariableGroup GetVariableGroup(String groupId);
}
}

View File

@@ -1,198 +0,0 @@
using System.ComponentModel;
using System.Linq;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
[EditorBrowsable(EditorBrowsableState.Never)]
public static class IResourceStoreExtensions
{
    /// <summary>
    /// Extracts the full resources from the <paramref name="store"/> which are referenced in the
    /// <paramref name="resources"/> collection.
    /// </summary>
    /// <param name="store">The store which contains the resources</param>
    /// <param name="resources">The resources which should be included with the job</param>
    /// <returns>A new <c>JobResources</c> instance with the filtered set of resources from the store</returns>
    public static JobResources GetJobResources(
        this IResourceStore store,
        PipelineResources resources)
    {
        var jobResources = new JobResources();
        jobResources.Containers.AddRange(resources.Containers.Select(x => x.Clone()));
        foreach (var endpointRef in resources.Endpoints)
        {
            var endpoint = store.Endpoints.Get(endpointRef);
            if (endpoint != null)
            {
                jobResources.Endpoints.Add(endpoint);
            }
        }
        foreach (var fileRef in resources.Files)
        {
            var file = store.Files.Get(fileRef);
            if (file != null)
            {
                jobResources.SecureFiles.Add(file);
            }
        }
        foreach (var repositoryRef in resources.Repositories)
        {
            // BUGFIX: previously an unresolved repository alias inserted a null entry;
            // skip missing repositories like every other resource type above.
            var repository = store.Repositories.Get(repositoryRef.Alias);
            if (repository != null)
            {
                jobResources.Repositories.Add(repository);
            }
        }
        return jobResources;
    }

    /// <summary>
    /// Retrieves a service endpoint from the store using the provided reference.
    /// </summary>
    /// <param name="store">The resource store which should be queried</param>
    /// <param name="reference">The service endpoint reference which should be resolved</param>
    /// <returns>A <c>ServiceEndpoint</c> instance matching the specified reference if found; otherwise, null</returns>
    public static ServiceEndpoint GetEndpoint(
        this IResourceStore store,
        ServiceEndpointReference reference)
    {
        return store.Endpoints.Get(reference);
    }

    /// <summary>
    /// Retrieves a secure file from the store using the provided reference.
    /// </summary>
    /// <param name="store">The resource store which should be queried</param>
    /// <param name="reference">The secure file reference which should be resolved</param>
    /// <returns>A <c>SecureFile</c> instance matching the specified reference if found; otherwise, null</returns>
    public static SecureFile GetFile(
        this IResourceStore store,
        SecureFileReference reference)
    {
        return store.Files.Get(reference);
    }

    /// <summary>
    /// Retrieves an agent queue from the store using the provided reference.
    /// </summary>
    /// <param name="store">The resource store which should be queried</param>
    /// <param name="reference">The agent queue reference which should be resolved</param>
    /// <returns>A <c>TaskAgentQueue</c> instance matching the specified reference if found; otherwise, null</returns>
    public static TaskAgentQueue GetQueue(
        this IResourceStore store,
        AgentQueueReference reference)
    {
        return store.Queues.Get(reference);
    }

    /// <summary>
    /// Retrieves an agent pool from the store using the provided reference.
    /// </summary>
    /// <param name="store">The resource store which should be queried</param>
    /// <param name="reference">The agent pool reference which should be resolved</param>
    /// <returns>A <c>TaskAgentPool</c> instance matching the specified reference if found; otherwise, null</returns>
    public static TaskAgentPool GetPool(
        this IResourceStore store,
        AgentPoolReference reference)
    {
        return store.Pools.Get(reference);
    }

    /// <summary>
    /// Retrieves a variable group from the store using the provided reference.
    /// </summary>
    /// <param name="store">The resource store which should be queried</param>
    /// <param name="reference">The variable group reference which should be resolved</param>
    /// <returns>A <c>VariableGroup</c> instance matching the specified reference if found; otherwise, null</returns>
    public static VariableGroup GetVariableGroup(
        this IResourceStore store,
        VariableGroupReference reference)
    {
        return store.VariableGroups.Get(reference);
    }

    /// <summary>
    /// Given a partially formed reference, returns the associated reference stored with the plan.
    /// The returned reference carries both id and name when the resource resolves; the input
    /// reference is returned unchanged when it does not.
    /// </summary>
    public static ResourceReference GetSnappedReference(
        this IResourceStore store,
        ResourceReference r)
    {
        if (r is VariableGroupReference vgr)
        {
            var m = store.VariableGroups.Get(vgr);
            if (m != null)
            {
                return new VariableGroupReference
                {
                    Id = m.Id,
                    Name = m.Name
                };
            }
        }
        else if (r is AgentQueueReference aqr)
        {
            var m = store.Queues.Get(aqr);
            if (m != null)
            {
                return new AgentQueueReference
                {
                    Id = m.Id,
                    Name = m.Name
                };
            }
        }
        else if (r is AgentPoolReference apr)
        {
            var m = store.Pools.Get(apr);
            if (m != null)
            {
                return new AgentPoolReference
                {
                    Id = m.Id,
                    Name = m.Name
                };
            }
        }
        else if (r is ServiceEndpointReference ser)
        {
            var m = store.Endpoints.Get(ser);
            if (m != null)
            {
                return new ServiceEndpointReference
                {
                    Id = m.Id,
                    Name = m.Name
                };
            }
        }
        else if (r is SecureFileReference sfr)
        {
            var m = store.Files.Get(sfr);
            if (m != null)
            {
                return new SecureFileReference
                {
                    Id = m.Id,
                    Name = m.Name
                };
            }
        }
        else if (r is EnvironmentReference er)
        {
            var m = store.Environments.Get(er);
            if (m != null)
            {
                return new EnvironmentReference
                {
                    Id = m.Id,
                    Name = m.Name
                };
            }
        }
        return r;
    }
}
}

View File

@@ -1,37 +0,0 @@
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides a mechanism of resolving a <c>SecureFileReference</c> to a <c>SecureFile</c>.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface ISecureFileResolver
{
/// <summary>
/// Attempts to resolve secure file references to <c>SecureFile</c> instances.
/// </summary>
/// <param name="references">The file references which should be resolved</param>
/// <returns>The resolved secure files</returns>
IList<SecureFile> Resolve(ICollection<SecureFileReference> references);
}
[EditorBrowsable(EditorBrowsableState.Never)]
public static class ISecureFileResolverExtensions
{
    /// <summary>
    /// Resolves a single secure file reference to a <c>SecureFile</c>.
    /// </summary>
    /// <param name="resolver">The resolver used to perform the lookup</param>
    /// <param name="reference">The file reference which should be resolved</param>
    /// <returns>The secure file if resolved; otherwise, null</returns>
    public static SecureFile Resolve(
        this ISecureFileResolver resolver,
        SecureFileReference reference)
    {
        var resolved = resolver.Resolve(new[] { reference });
        return resolved.FirstOrDefault();
    }
}
}

View File

@@ -1,19 +0,0 @@
using System.Collections.Generic;
using System.ComponentModel;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides access to secure files which are referenced within a pipeline.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface ISecureFileStore
{
/// <summary>
/// Retrieves the list of secure file references authorized for use in this store.
/// </summary>
IList<SecureFileReference> GetAuthorizedReferences();
/// <summary>
/// Retrieves the secure file matching the given reference.
/// </summary>
SecureFile Get(SecureFileReference reference);
/// <summary>
/// Gets the <c>ISecureFileResolver</c> used by this store, if any.
/// </summary>
ISecureFileResolver Resolver { get; }
}
}

View File

@@ -1,46 +0,0 @@
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides a mechanism of resolving a <c>ServiceEndpointReference</c> to a <c>ServiceEndpoint</c>.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IServiceEndpointResolver
{
/// <summary>
/// Adds the endpoint reference as authorized to ensure future retrievals of the endpoint
/// are allowed regardless of security context.
/// </summary>
/// <param name="reference">The endpoint reference which should be considered authorized</param>
void Authorize(ServiceEndpointReference reference);
/// <summary>
/// Attempts to resolve endpoint references to <c>ServiceEndpoint</c> instances.
/// </summary>
/// <param name="references">The endpoint references which should be resolved</param>
/// <returns>The resolved service endpoints</returns>
IList<ServiceEndpoint> Resolve(ICollection<ServiceEndpointReference> references);
/// <summary>
/// Retrieves the list of endpoint references considered authorized by this resolver.
/// </summary>
IList<ServiceEndpointReference> GetAuthorizedReferences();
}
[EditorBrowsable(EditorBrowsableState.Never)]
public static class IServiceEndpointResolverExtensions
{
    /// <summary>
    /// Resolves a single endpoint reference to a <c>ServiceEndpoint</c>.
    /// </summary>
    /// <param name="resolver">The resolver used to perform the lookup</param>
    /// <param name="reference">The endpoint reference which should be resolved</param>
    /// <returns>The service endpoint if resolved; otherwise, null</returns>
    public static ServiceEndpoint Resolve(
        this IServiceEndpointResolver resolver,
        ServiceEndpointReference reference)
    {
        var resolved = resolver.Resolve(new[] { reference });
        return resolved.FirstOrDefault();
    }
}
}

View File

@@ -1,39 +0,0 @@
using System.Collections.Generic;
using System.ComponentModel;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides access to service endpoints which are referenced within a pipeline.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IServiceEndpointStore
{
/// <summary>
/// Retrieves the list of all endpoints authorized for use in this store.
/// </summary>
/// <returns>The list of <c>ServiceEndpointReference</c> objects authorized for use</returns>
IList<ServiceEndpointReference> GetAuthorizedReferences();
/// <summary>
/// Adds an endpoint reference which should be considered authorized. Future
/// calls to retrieve this resource will be treated as pre-authorized regardless
/// of authorization context used.
/// </summary>
/// <param name="endpoint">The endpoint which should be authorized</param>
void Authorize(ServiceEndpointReference endpoint);
/// <summary>
/// Attempts to resolve and authorize an endpoint for use.
/// </summary>
/// <param name="endpoint">The endpoint reference to be resolved</param>
/// <returns>The endpoint if found and authorized; otherwise, null</returns>
ServiceEndpoint Get(ServiceEndpointReference endpoint);
/// <summary>
/// Gets the <c>IServiceEndpointResolver</c> used by this store, if any.
/// </summary>
IServiceEndpointResolver Resolver { get; }
}
}

View File

@@ -1,21 +0,0 @@
using System;
using System.Collections.Generic;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Supplies additional task steps surrounding a job's authored steps.
/// </summary>
public interface IStepProvider
{
/// <summary>
/// Gets task steps to run before the supplied job steps.
/// </summary>
IList<TaskStep> GetPreSteps(IPipelineContext context, IReadOnlyList<JobStep> steps);
/// <summary>
/// Gets post-execution task steps keyed by Guid
/// (presumably the id of the step they follow — confirm with implementations).
/// </summary>
Dictionary<Guid, List<TaskStep>> GetPostTaskSteps(IPipelineContext context, IReadOnlyList<JobStep> steps);
/// <summary>
/// Gets task steps to run after the supplied job steps.
/// </summary>
IList<TaskStep> GetPostSteps(IPipelineContext context, IReadOnlyList<JobStep> steps);
/// <summary>
/// Given a JobStep (eg., download step) it will translate into corresponding task steps
/// </summary>
/// <param name="context">The pipeline context</param>
/// <param name="step">Input step to be resolved</param>
/// <param name="resolvedSteps">Resolved output steps</param>
/// <returns>true if this is resolved, false otherwise. Passing a powershell step to ResolveStep would return false</returns>
Boolean ResolveStep(IPipelineContext context, JobStep step, out IList<TaskStep> resolvedSteps);
}
}

View File

@@ -1,12 +0,0 @@
using System;
using System.ComponentModel;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
[EditorBrowsable(EditorBrowsableState.Never)]
public interface ITaskResolver
{
/// <summary>
/// Resolves a task definition from its unique identifier and a version spec.
/// </summary>
TaskDefinition Resolve(Guid taskId, String versionSpec);
}
}

View File

@@ -1,30 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides a contract for resolving tasks from a given store.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface ITaskStore
{
/// <summary>
/// Resolves a task from the store using the unique identifier and version.
/// </summary>
/// <param name="taskId">The unique identifier of the task</param>
/// <param name="version">The version of the task which is desired</param>
/// <returns>The closest matching task definition if found; otherwise, null</returns>
TaskDefinition ResolveTask(Guid taskId, String version);
/// <summary>
/// Resolves a task from the store using the specified name and version.
/// </summary>
/// <param name="name">The name of the task</param>
/// <param name="version">The version of the task which is desired</param>
/// <returns>The closest matching task definition if found; otherwise, null</returns>
TaskDefinition ResolveTask(String name, String version);
}
}

View File

@@ -1,14 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
namespace GitHub.DistributedTask.Pipelines
{
[EditorBrowsable(EditorBrowsableState.Never)]
public interface ITaskTemplateResolver
{
/// <summary>
/// Returns true when this resolver can handle the given template reference.
/// </summary>
Boolean CanResolve(TaskTemplateReference template);
/// <summary>
/// Expands the template step into the concrete task steps it represents.
/// </summary>
IList<TaskStep> ResolveTasks(TaskTemplateStep template);
}
}

View File

@@ -1,16 +0,0 @@
using System.Collections.Generic;
using System.ComponentModel;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides a mechanism for task templates to be resolved at build time.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface ITaskTemplateStore
{
/// <summary>
/// Registers a resolver to be consulted when expanding template steps.
/// </summary>
void AddProvider(ITaskTemplateResolver provider);
/// <summary>
/// Expands a template step into the concrete task steps it represents.
/// </summary>
IEnumerable<TaskStep> ResolveTasks(TaskTemplateStep step);
}
}

View File

@@ -1,88 +0,0 @@
using System;
using System.ComponentModel;
using GitHub.Services.WebApi;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Identifies how a pipeline variable is defined.
/// </summary>
public enum VariableType
{
// A variable declared inline (deserialized as <c>Variable</c>).
Inline = 0,
// A variable supplied via a variable group (deserialized as <c>VariableGroupReference</c>).
Group = 1,
}
/// <summary>
/// Common contract for pipeline variables; concrete type selection during JSON
/// deserialization is handled by <c>VariableJsonConverter</c>.
/// </summary>
[JsonConverter(typeof(VariableJsonConverter))]
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IVariable
{
/// <summary>
/// Gets the kind of variable (inline value or variable-group reference).
/// </summary>
VariableType Type { get; }
}
// Polymorphic JSON reader for IVariable: inspects the raw payload to decide
// whether it describes an inline Variable or a VariableGroupReference.
internal class VariableJsonConverter : VssSecureJsonConverter
{
public VariableJsonConverter()
{
}
// Read-only converter; writing is left to default serialization.
public override Boolean CanWrite
{
get
{
return false;
}
}
public override Boolean CanConvert(Type objectType)
{
return typeof(IVariable).IsAssignableFrom(objectType);
}
public override Object ReadJson(JsonReader reader, Type objectType, Object existingValue, JsonSerializer serializer)
{
// Anything other than a JSON object (e.g. a null token) is not a variable.
if (reader.TokenType != JsonToken.StartObject)
{
return null;
}
var resultObj = JObject.Load(reader);
// Prefer an explicit "type" discriminator; accept either the numeric enum
// value or its case-insensitive name.
var variableType = VariableType.Inline;
if (resultObj.TryGetValue("type", StringComparison.OrdinalIgnoreCase, out var rawValue))
{
if (rawValue.Type == JTokenType.Integer)
{
variableType = (VariableType)(Int32)rawValue;
}
if (rawValue.Type == JTokenType.String)
{
variableType = (VariableType)Enum.Parse(typeof(VariableType), (String)rawValue, true);
}
}
// No discriminator present: infer a group reference from properties that
// only variable groups carry ("id", "groupType" or "secretStore").
else if (resultObj.TryGetValue("id", StringComparison.OrdinalIgnoreCase, out _) ||
resultObj.TryGetValue("groupType", StringComparison.OrdinalIgnoreCase, out _) ||
resultObj.TryGetValue("secretStore", StringComparison.OrdinalIgnoreCase, out _))
{
variableType = VariableType.Group;
}
IVariable result = null;
switch (variableType)
{
case VariableType.Group:
result = new VariableGroupReference();
break;
default:
result = new Variable();
break;
}
// Re-read the buffered object into the chosen concrete instance.
using (var objectReader = resultObj.CreateReader())
{
serializer.Populate(objectReader, result);
}
return result;
}
}
}

View File

@@ -1,32 +0,0 @@
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides a mechanism of resolving a <c>VariableGroupReference</c> to a <c>VariableGroup</c>.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IVariableGroupResolver
{
/// <summary>
/// Attempts to resolve variable group references to <c>VariableGroup</c> instances.
/// </summary>
/// <param name="references">The variable groups which should be resolved</param>
/// <returns>The resolved variable groups</returns>
IList<VariableGroup> Resolve(ICollection<VariableGroupReference> references);
}
[EditorBrowsable(EditorBrowsableState.Never)]
public static class IVariableGroupResolverExtensions
{
    /// <summary>
    /// Resolves a single variable group reference to a <c>VariableGroup</c>.
    /// </summary>
    /// <param name="resolver">The resolver used to perform the lookup</param>
    /// <param name="reference">The variable group reference which should be resolved</param>
    /// <returns>The variable group if resolved; otherwise, null</returns>
    public static VariableGroup Resolve(
        this IVariableGroupResolver resolver,
        VariableGroupReference reference)
    {
        var resolved = resolver.Resolve(new[] { reference });
        return resolved.FirstOrDefault();
    }
}
}

View File

@@ -1,21 +0,0 @@
using System.Collections.Generic;
using System.ComponentModel;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Provides access to variable groups which are referenced within a pipeline.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IVariableGroupStore : IStepProvider
{
/// <summary>
/// Retrieves the list of variable group references authorized for use in this store.
/// </summary>
IList<VariableGroupReference> GetAuthorizedReferences();
/// <summary>
/// Retrieves the variable group matching the given reference.
/// </summary>
VariableGroup Get(VariableGroupReference queue);
/// <summary>
/// Retrieves the value provider for the given variable group reference, if any.
/// </summary>
IVariableValueProvider GetValueProvider(VariableGroupReference queue);
/// <summary>
/// Gets the <c>IVariableGroupResolver</c> used by this store, if any.
/// </summary>
IVariableGroupResolver Resolver { get; }
}
}

View File

@@ -1,22 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Supplies values (or value-fetching steps) for variables backed by a variable group.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IVariableValueProvider
{
/// <summary>
/// Gets the variable group type this provider handles.
/// </summary>
String GroupType
{
get;
}
/// <summary>
/// Returns true when values should be fetched for the given pipeline context.
/// </summary>
Boolean ShouldGetValues(IPipelineContext context);
/// <summary>
/// Gets task steps used to fetch the requested keys for the group at runtime.
/// </summary>
IList<TaskStep> GetSteps(IPipelineContext context, VariableGroupReference group, IEnumerable<String> keys);
/// <summary>
/// Gets the values of the requested keys from the group using the supplied endpoint.
/// </summary>
IDictionary<String, VariableValue> GetValues(VariableGroup group, ServiceEndpoint endpoint, IEnumerable<String> keys, Boolean includeSecrets);
}
}

View File

@@ -1,291 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Runtime.Serialization;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.Runtime;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using Newtonsoft.Json;
namespace GitHub.DistributedTask.Pipelines
{
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class Job
{
// Default constructor reserved for JSON deserialization.
[JsonConstructor]
public Job()
{
}
// Copy constructor used by Clone: clones clonable members, copies value members.
// NOTE(review): m_scopes is NOT copied here, and m_variables is copied shallowly
// (by reference) unlike demands/steps — confirm both are intentional.
private Job(Job jobToCopy)
{
this.Id = jobToCopy.Id;
this.Name = jobToCopy.Name;
this.DisplayName = jobToCopy.DisplayName;
this.Container = jobToCopy.Container?.Clone();
this.ServiceContainers = jobToCopy.ServiceContainers?.Clone();
this.ContinueOnError = jobToCopy.ContinueOnError;
this.TimeoutInMinutes = jobToCopy.TimeoutInMinutes;
this.CancelTimeoutInMinutes = jobToCopy.CancelTimeoutInMinutes;
this.Workspace = jobToCopy.Workspace?.Clone();
this.Target = jobToCopy.Target?.Clone();
this.EnvironmentVariables = jobToCopy.EnvironmentVariables?.Clone();
if (jobToCopy.m_demands != null && jobToCopy.m_demands.Count > 0)
{
m_demands = new List<Demand>(jobToCopy.m_demands.Select(x => x.Clone()));
}
if (jobToCopy.m_steps != null && jobToCopy.m_steps.Count > 0)
{
m_steps = new List<JobStep>(jobToCopy.m_steps.Select(x => x.Clone() as JobStep));
}
if (jobToCopy.m_variables != null && jobToCopy.m_variables.Count > 0)
{
m_variables = new List<IVariable>(jobToCopy.m_variables);
}
if (jobToCopy.m_sidecarContainers != null && jobToCopy.m_sidecarContainers.Count > 0)
{
m_sidecarContainers = new Dictionary<String, String>(jobToCopy.m_sidecarContainers, StringComparer.OrdinalIgnoreCase);
}
}
// Unique identifier of the job instance.
[DataMember]
public Guid Id
{
get;
set;
}
// Job name (also used as the configuration name during expansion).
[DataMember]
public String Name
{
get;
set;
}
[DataMember(EmitDefaultValue = false)]
public String DisplayName
{
get;
set;
}
// Container specification token, evaluated elsewhere.
[DataMember(EmitDefaultValue = false)]
public TemplateToken Container
{
get;
set;
}
[DataMember(EmitDefaultValue = false)]
public TemplateToken ServiceContainers
{
get;
set;
}
[DataMember(EmitDefaultValue = false)]
public Boolean ContinueOnError
{
get;
set;
}
[DataMember(EmitDefaultValue = false)]
public TemplateToken EnvironmentVariables
{
get;
set;
}
[DataMember(EmitDefaultValue = false)]
public Int32 TimeoutInMinutes
{
get;
set;
}
[DataMember(EmitDefaultValue = false)]
public Int32 CancelTimeoutInMinutes
{
get;
set;
}
// Lazily-created collection; trimmed back to null on serialization when empty.
public IList<Demand> Demands
{
get
{
if (m_demands == null)
{
m_demands = new List<Demand>();
}
return m_demands;
}
}
[DataMember(EmitDefaultValue = false)]
public IdentityRef ExecuteAs
{
get;
set;
}
[DataMember(EmitDefaultValue = false)]
public WorkspaceOptions Workspace
{
get;
set;
}
[DataMember(EmitDefaultValue = false)]
public PhaseTarget Target
{
get;
set;
}
// Lazily-created collection; trimmed back to null on serialization when empty.
public IList<JobStep> Steps
{
get
{
if (m_steps == null)
{
m_steps = new List<JobStep>();
}
return m_steps;
}
}
// Lazily-created collection; trimmed back to null on serialization when empty.
public IList<ContextScope> Scopes
{
get
{
if (m_scopes == null)
{
m_scopes = new List<ContextScope>();
}
return m_scopes;
}
}
// Lazily-created map with case-insensitive keys.
public IDictionary<String, String> SidecarContainers
{
get
{
if (m_sidecarContainers == null)
{
m_sidecarContainers = new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase);
}
return m_sidecarContainers;
}
}
// Lazily-created collection; trimmed back to null on serialization when empty.
public IList<IVariable> Variables
{
get
{
if (m_variables == null)
{
m_variables = new List<IVariable>();
}
return m_variables;
}
}
/// <summary>
/// Creates a copy of this job via the private copy constructor.
/// </summary>
public Job Clone()
{
return new Job(this);
}
/// <summary>
/// Creates an instance of a task using the specified execution context.
/// </summary>
/// <param name="context">The job execution context</param>
/// <param name="taskName">The name of the task in the steps list</param>
/// <returns>The cloned task step (null when no step matches) together with its resolved definition</returns>
public CreateTaskResult CreateTask(
JobExecutionContext context,
String taskName)
{
ArgumentUtility.CheckStringForNullOrEmpty(taskName, nameof(taskName));
TaskDefinition definition = null;
// Find the single step with this name; SingleOrDefault throws if the name is ambiguous.
var task = this.Steps.SingleOrDefault(x => taskName.Equals(x.Name, StringComparison.OrdinalIgnoreCase))?.Clone() as TaskStep;
if (task != null)
{
// NOTE(review): ResolveTask may return null, which would NRE on definition.Inputs below — confirm.
definition = context.TaskStore.ResolveTask(task.Reference.Id, task.Reference.Version);
// Fill in defaults for any inputs the step did not explicitly set.
foreach (var input in definition.Inputs.Where(x => x != null))
{
var key = input.Name?.Trim() ?? String.Empty;
if (!String.IsNullOrEmpty(key))
{
if (!task.Inputs.ContainsKey(key))
{
task.Inputs[key] = input.DefaultValue?.Trim() ?? String.Empty;
}
}
}
// Now expand any macros which appear in inputs
foreach (var input in task.Inputs.ToArray())
{
task.Inputs[input.Key] = context.ExpandVariables(input.Value);
}
// Set the system variables populated while running an individual task
context.Variables[WellKnownDistributedTaskVariables.TaskInstanceId] = task.Id.ToString("D");
context.Variables[WellKnownDistributedTaskVariables.TaskDisplayName] = task.DisplayName ?? task.Name;
context.Variables[WellKnownDistributedTaskVariables.TaskInstanceName] = task.Name;
}
return new CreateTaskResult(task, definition);
}
// Serialization hook: collapse empty collections to null so optional members
// are omitted from the serialized payload (EmitDefaultValue = false).
// NOTE(review): m_sidecarContainers is not trimmed here, unlike the others — confirm.
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_demands?.Count == 0)
{
m_demands = null;
}
if (m_steps?.Count == 0)
{
m_steps = null;
}
if (m_scopes?.Count == 0)
{
m_scopes = null;
}
if (m_variables?.Count == 0)
{
m_variables = null;
}
}
// Backing fields for the lazily-created collection properties.
[DataMember(Name = "Demands", EmitDefaultValue = false)]
private List<Demand> m_demands;
[DataMember(Name = "Steps", EmitDefaultValue = false)]
private List<JobStep> m_steps;
[DataMember(Name = "Scopes", EmitDefaultValue = false)]
private List<ContextScope> m_scopes;
[DataMember(Name = "Variables", EmitDefaultValue = false)]
private List<IVariable> m_variables;
[DataMember(Name = "SidecarContainers", EmitDefaultValue = false)]
private IDictionary<String, String> m_sidecarContainers;
}
}

View File

@@ -1,106 +0,0 @@
using System;
using System.Collections.Generic;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Describes which phase configurations ("jobs") should be expanded and, per
/// configuration, the explicitly requested attempt number. When no filter has
/// ever been supplied, every configuration is considered included.
/// </summary>
public class JobExpansionOptions
{
    /// <summary>
    /// Sentinel attempt number meaning "no attempt was explicitly requested".
    /// </summary>
    public const Int32 NoSpecifiedAttemptNumber = -1;

    /// <summary>
    /// Initializes the filter with the given configurations, each with no
    /// explicitly requested attempt number.
    /// </summary>
    public JobExpansionOptions(ICollection<String> configurations)
    {
        AddConfigurations(configurations);
    }

    internal JobExpansionOptions(IDictionary<String, Int32> configurations)
    {
        UpdateConfigurations(configurations);
    }

    internal JobExpansionOptions(
        String configuration,
        Int32 attemptNumber = NoSpecifiedAttemptNumber)
    {
        if (!String.IsNullOrEmpty(configuration))
        {
            this.Configurations.Add(configuration, attemptNumber);
        }
    }

    /// <summary>
    /// Specifies a filter for the expansion of specific Phase configurations.
    /// The key is the configuration name; the value is the explicitly requested
    /// attempt number. While the backing mapping is null there is no filter and
    /// all configurations will be produced.
    /// </summary>
    internal IDictionary<String, Int32> Configurations
    {
        get
        {
            return m_configurations ?? (m_configurations = new Dictionary<String, Int32>(StringComparer.OrdinalIgnoreCase));
        }
    }

    /// <summary>
    /// Returns true when the named configuration passes the filter (or when no
    /// filter has ever been supplied).
    /// </summary>
    public Boolean IsIncluded(String configuration)
    {
        return m_configurations?.ContainsKey(configuration) ?? true;
    }

    /// <summary>
    /// Adds new configurations with no specified custom attempt number.
    /// Configurations already present keep their existing attempt numbers.
    /// </summary>
    public void AddConfigurations(ICollection<String> configurations)
    {
        if (configurations == null)
        {
            return;
        }
        // Touching the property materializes the dictionary even for an empty
        // collection, switching the filter from "include all" to explicit.
        var filter = this.Configurations;
        foreach (var name in configurations)
        {
            if (!filter.ContainsKey(name))
            {
                filter[name] = NoSpecifiedAttemptNumber;
            }
        }
    }

    /// <summary>
    /// Adds (or replaces) configurations and their associated attempt numbers
    /// with the provided values.
    /// </summary>
    public void UpdateConfigurations(IDictionary<String, Int32> configurations)
    {
        if (configurations == null)
        {
            return;
        }
        var filter = this.Configurations;
        foreach (var pair in configurations)
        {
            filter[pair.Key] = pair.Value;
        }
    }

    /// <summary>
    /// Returns the custom attempt number for the configuration, or
    /// <see cref="NoSpecifiedAttemptNumber"/> if none was specified.
    /// </summary>
    /// <param name="configuration">configuration or "job name"</param>
    public Int32 GetAttemptNumber(String configuration)
    {
        return m_configurations != null && m_configurations.TryGetValue(configuration, out var attempt)
            ? attempt
            : NoSpecifiedAttemptNumber;
    }

    private Dictionary<String, Int32> m_configurations;
}
}

View File

@@ -1,480 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;
using System.Linq;
using System.Runtime.Serialization;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.Pipelines.Runtime;
using GitHub.DistributedTask.Pipelines.Validation;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class JobFactory : PhaseNode
{
public JobFactory()
{
}
// Copy constructor: base copies the shared PhaseNode state; the step list is deep-cloned.
// NOTE(review): m_scopes is not copied here — confirm scope loss on copy is intentional.
private JobFactory(JobFactory copy)
: base(copy)
{
if (copy.m_steps != null && copy.m_steps.Count > 0)
{
m_steps = new List<Step>(copy.m_steps.Select(x => x.Clone()));
}
}
/// <summary>
/// Gets the phase type.
/// </summary>
[DataMember(EmitDefaultValue = false)]
public override PhaseType Type => PhaseType.JobFactory;
// Lazily-created scopes; copied onto each created job and trimmed to null on serialization.
public IList<ContextScope> Scopes
{
get
{
if (m_scopes == null)
{
m_scopes = new List<ContextScope>();
}
return m_scopes;
}
}
/// <summary>
/// Gets the list of steps associated with this phase. At runtime the steps will be used as a template for
/// the execution of a job.
/// </summary>
public IList<Step> Steps
{
get
{
if (m_steps == null)
{
m_steps = new List<Step>();
}
return m_steps;
}
}
// Token evaluated to produce the expansion strategy (configurations, max-parallel, fail-fast).
[DataMember(EmitDefaultValue = false)]
public TemplateToken Strategy
{
get;
set;
}
// Token evaluated per-job to produce the job's display name.
[DataMember(EmitDefaultValue = false)]
public ScalarToken JobDisplayName
{
get;
set;
}
// Token evaluated per-job to produce the job's target.
[DataMember(EmitDefaultValue = false)]
public TemplateToken JobTarget
{
get;
set;
}
// Token evaluated per-job to produce TimeoutInMinutes.
[DataMember(EmitDefaultValue = false)]
public ScalarToken JobTimeout
{
get;
set;
}
// Token evaluated per-job to produce CancelTimeoutInMinutes.
[DataMember(EmitDefaultValue = false)]
public ScalarToken JobCancelTimeout
{
get;
set;
}
// Token copied onto each created job as its container specification.
[DataMember(EmitDefaultValue = false)]
public TemplateToken JobContainer
{
get;
set;
}
// Token copied onto each created job as its service-container specification.
[DataMember(EmitDefaultValue = false)]
public TemplateToken JobServiceContainers
{
get;
set;
}
// Token copied onto each created job as its environment variables.
[DataMember(EmitDefaultValue = false)]
public TemplateToken EnvironmentVariables
{
get;
set;
}
/// <summary>
/// Scans the factory's expansion-driving expression tokens and reports whether
/// any of them reference the "event" or "outputs" contexts.
/// </summary>
/// <param name="isEventReferenced">True when any expression references the event context</param>
/// <param name="isOutputsReferenced">True when any expression references the outputs context</param>
public void CheckExpandReferences(
out bool isEventReferenced,
out bool isOutputsReferenced)
{
isEventReferenced = false;
isOutputsReferenced = false;
// Collect every expression token from the tokens that drive job expansion.
// NOTE(review): assumes Traverse() tolerates null tokens — confirm.
var expressionTokens = Strategy.Traverse()
.Concat(JobDisplayName.Traverse())
.Concat(JobTarget.Traverse())
.Concat(JobTimeout.Traverse())
.Concat(JobCancelTimeout.Traverse())
.OfType<BasicExpressionToken>()
.ToArray();
var parser = new ExpressionParser();
foreach (var expressionToken in expressionTokens)
{
var tree = parser.ValidateSyntax(expressionToken.Expression, null);
// Index 0: event pattern, index 1: outputs pattern (matches argument order).
var isReferenced = tree.CheckReferencesContext(
PipelineTemplateConstants.EventPattern,
PipelineTemplateConstants.OutputsPattern);
if (!isEventReferenced)
{
isEventReferenced = isReferenced[0];
}
if (!isOutputsReferenced)
{
isOutputsReferenced = isReferenced[1];
}
}
}
/// <summary>
/// Expands this job factory into concrete jobs using the configured strategy.
/// </summary>
/// <param name="context">The phase execution context</param>
/// <param name="options">Optional filter restricting which configurations (and attempts) are produced</param>
/// <returns>The expanded jobs together with max-concurrency and fail-fast settings</returns>
public ExpandPhaseResult Expand(
PhaseExecutionContext context,
JobExpansionOptions options = null)
{
var result = new ExpandPhaseResult();
var trace = new JobFactoryTrace(context.Trace);
var schema = new PipelineTemplateSchemaFactory().CreateSchema();
var templateEvaluator = new PipelineTemplateEvaluator(trace, schema);
trace.Info("Evaluating strategy");
// A display name that is itself an expression cannot serve as the strategy's
// default display name, so pass null in that case.
var displayName = JobDisplayName is ExpressionToken ? null : DisplayName;
var strategy = templateEvaluator.EvaluateStrategy(Strategy, context.Data, displayName);
foreach (var jobContext in ExpandContexts(context, options, strategy, trace, templateEvaluator))
{
result.Jobs.Add(jobContext.Job);
}
if (strategy.MaxParallel > 0)
{
result.MaxConcurrency = strategy.MaxParallel;
}
else
{
// No explicit limit: allow all expanded jobs to run concurrently.
result.MaxConcurrency = result.Jobs.Count;
}
result.FailFast = strategy.FailFast;
return result;
}
/// <summary>
/// Lazily produces a job execution context for each strategy configuration,
/// honoring the optional configuration filter.
/// </summary>
public IEnumerable<JobExecutionContext> ExpandContexts(
PhaseExecutionContext context,
JobExpansionOptions options = null,
StrategyResult strategy = null,
DistributedTask.ObjectTemplating.ITraceWriter trace = null,
PipelineTemplateEvaluator templateEvaluator = null)
{
if (trace == null)
{
trace = new JobFactoryTrace(context.Trace);
}
if (templateEvaluator == null)
{
var schema = new PipelineTemplateSchemaFactory().CreateSchema();
templateEvaluator = new PipelineTemplateEvaluator(trace, schema);
}
// Strategy
if (strategy == null)
{
trace.Info("Evaluating strategy");
var displayName = JobDisplayName is ExpressionToken ? null : DisplayName;
strategy = templateEvaluator.EvaluateStrategy(Strategy, context.Data, displayName);
}
// Check max jobs
var maxJobs = context.ExecutionOptions.MaxJobExpansion ?? 100;
if (strategy.Configurations.Count > maxJobs)
{
throw new MaxJobExpansionException($"Strategy produced more than {maxJobs}");
}
// Create jobs
for (var i = 0; i < strategy.Configurations.Count; i++)
{
var configuration = strategy.Configurations[i];
var jobName = configuration.Name;
var attempt = 1;
if (options?.Configurations.Count > 0)
{
// Filtered expansion: skip configurations not requested. The filter also
// supplies the attempt number, which may be
// JobExpansionOptions.NoSpecifiedAttemptNumber (-1) — NOTE(review): confirm downstream handling.
if (!options.Configurations.TryGetValue(jobName, out attempt))
{
continue;
}
}
yield return CreateJob(trace, context, templateEvaluator, jobName, configuration.DisplayName, attempt, i + 1, strategy.Configurations.Count, configuration.ContextData);
}
}
/// <summary>
/// Resolves external references and ensures the steps are compatible with the selected target.
/// </summary>
/// <param name="context">The validation context</param>
/// <param name="result">Accumulates validation output for this phase</param>
public override void Validate(
PipelineBuildContext context,
ValidationResult result)
{
base.Validate(context, result);
var phaseStepValidationResult = new Phase.StepValidationResult();
// Require the latest agent version.
if (context.BuildOptions.DemandLatestAgent)
{
var latestPackageVersion = context.PackageStore?.GetLatestVersion(WellKnownPackageTypes.Agent);
if (latestPackageVersion == null)
{
throw new NotSupportedException("Unable to determine the latest agent package version");
}
phaseStepValidationResult.MinAgentVersion = latestPackageVersion.ToString();
}
Phase.ValidateSteps(context, this, new AgentQueueTarget(), result, Steps, phaseStepValidationResult);
// Resolve the target to ensure we have stable identifiers for the orchestration engine
// phase targets with expressions need to be evaluated against resolved job contexts.
bool validateTarget = false;
if (this.Target.Type == PhaseTargetType.Pool || this.Target.Type == PhaseTargetType.Server)
{
validateTarget = true;
}
else if (this.Target is AgentQueueTarget agentQueueTarget && agentQueueTarget.IsLiteral())
{
validateTarget = true;
}
if (validateTarget)
{
this.Target.Validate(
context,
context.BuildOptions,
result,
this.Steps,
phaseStepValidationResult.TaskDemands);
}
}
/// <summary>
/// Creates a single job (and its execution context) for one strategy configuration.
/// </summary>
/// <param name="trace">Trace writer for diagnostic output</param>
/// <param name="phaseContext">The enclosing phase execution context</param>
/// <param name="templateEvaluator">Evaluator used to expand template tokens against the job's data</param>
/// <param name="jobName">The configuration name which becomes the job name</param>
/// <param name="configurationDisplayName">Display name supplied by the strategy configuration, if any</param>
/// <param name="attempt">The attempt number for the job instance</param>
/// <param name="positionInPhase">1-based index of this job within the phase</param>
/// <param name="totalJobsInPhase">Total number of jobs produced by the phase</param>
/// <param name="contextData">Per-configuration context data used for token evaluation</param>
/// <returns>The populated job execution context (its Job.Definition is the created job)</returns>
private JobExecutionContext CreateJob(
DistributedTask.ObjectTemplating.ITraceWriter trace,
PhaseExecutionContext phaseContext,
PipelineTemplateEvaluator templateEvaluator,
String jobName,
String configurationDisplayName,
Int32 attempt,
Int32 positionInPhase,
Int32 totalJobsInPhase,
IDictionary<String, PipelineContextData> contextData)
{
trace.Info($"Creating job '{jobName}'");
var jobContext = new JobExecutionContext(
context: phaseContext,
job: new JobInstance(jobName, attempt),
variables: null,
positionInPhase: positionInPhase,
totalJobsInPhase: totalJobsInPhase,
data: contextData);
var job = new Job
{
Id = jobContext.GetInstanceId(),
Name = jobContext.Job.Name,
EnvironmentVariables = EnvironmentVariables,
Container = JobContainer,
ServiceContainers = JobServiceContainers,
};
// Display name precedence: evaluated expression > configuration-provided > factory default.
if (JobDisplayName is ExpressionToken)
{
trace.Info("Evaluating display name");
job.DisplayName = templateEvaluator.EvaluateJobDisplayName(JobDisplayName, jobContext.Data, DisplayName);
}
else if (!String.IsNullOrEmpty(configurationDisplayName))
{
job.DisplayName = configurationDisplayName;
}
else
{
job.DisplayName = DisplayName;
}
trace.Info("Evaluating timeout");
job.TimeoutInMinutes = templateEvaluator.EvaluateJobTimeout(JobTimeout, jobContext.Data);
trace.Info("Evaluating cancel timeout");
job.CancelTimeoutInMinutes = templateEvaluator.EvaluateJobCancelTimeout(JobCancelTimeout, jobContext.Data);
trace.Info("Evaluating target");
job.Target = templateEvaluator.EvaluateJobTarget(JobTarget, jobContext.Data);
jobContext.Job.Definition = job;
// Resolve the pool by name
if (job.Target is AgentPoolTarget pool &&
pool.Pool?.Id == 0 &&
!String.IsNullOrEmpty(pool.Pool.Name?.Literal))
{
var resolved = jobContext.ResourceStore.GetPool(pool.Pool.Name.Literal);
if (resolved != null)
{
pool.Pool = new AgentPoolReference { Id = resolved.Id, Name = resolved.Name };
}
}
// Resolve the queue by name
if (job.Target is AgentQueueTarget queue &&
queue.Queue?.Id == 0 &&
!String.IsNullOrEmpty(queue.Queue.Name?.Literal))
{
var resolved = jobContext.ResourceStore.GetQueue(queue.Queue.Name.Literal);
if (resolved != null)
{
queue.Queue = new AgentQueueReference { Id = resolved.Id, Name = resolved.Name };
}
}
// Always add self
var self = jobContext.ResourceStore?.Repositories.Get(PipelineConstants.SelfAlias);
if (self == null)
{
throw new InvalidOperationException($"Repository '{PipelineConstants.SelfAlias}' not found");
}
jobContext.ReferencedResources.Repositories.Add(self);
// Add the endpoint
if (self.Endpoint != null)
{
jobContext.ReferencedResources.AddEndpointReference(self.Endpoint);
var repositoryEndpoint = jobContext.ResourceStore?.GetEndpoint(self.Endpoint);
if (repositoryEndpoint == null)
{
throw new ResourceNotFoundException(PipelineStrings.ServiceEndpointNotFound(self.Endpoint));
}
}
// Update the execution context with the job-specific system variables
UpdateJobContextVariablesFromJob(jobContext, job);
// Materialize the factory's template steps onto the job; only action steps are supported.
var identifier = jobContext.GetInstanceName();
foreach (var step in Steps)
{
if (step.Type == StepType.Action)
{
job.Steps.Add(Phase.CreateJobActionStep(jobContext, identifier, step as ActionStep));
}
else
{
throw new NotSupportedException($"Unexpected step type '{step.Type}'");
}
}
foreach (var scope in Scopes)
{
job.Scopes.Add(scope);
}
return jobContext;
}
// Serialization hook: collapse empty collections to null so optional members
// are omitted from the serialized payload (EmitDefaultValue = false).
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_scopes?.Count == 0)
{
m_scopes = null;
}
if (m_steps?.Count == 0)
{
m_steps = null;
}
}
/// <summary>
/// Adapts the expressions trace writer to the object-templating trace interface,
/// tagging error/verbose lines with ##[error] / ##[debug] prefixes.
/// </summary>
private sealed class JobFactoryTrace : DistributedTask.ObjectTemplating.ITraceWriter
{
// A null inner trace is tolerated; output is then dropped.
public JobFactoryTrace(DistributedTask.Expressions2.ITraceWriter trace)
{
m_trace = trace;
}
public void Error(
String message,
params Object[] args)
{
Info("##[error]", message, args);
}
public void Info(
String message,
params Object[] args)
{
Info(String.Empty, message, args);
}
public void Verbose(
String message,
params Object[] args)
{
Info("##[debug]", message, args);
}
// Formats (invariant culture) and forwards one line to the inner trace, if present.
private void Info(
String prefix,
String message,
params Object[] args)
{
if (m_trace == null)
{
return;
}
if (args?.Length > 0)
{
m_trace.Info(String.Format(CultureInfo.InvariantCulture, $"{prefix}{message}", args));
}
else
{
m_trace.Info($"{prefix}{message}");
}
}
private DistributedTask.Expressions2.ITraceWriter m_trace;
}
// Backing fields; EmitDefaultValue = false plus the OnSerializing hook keeps
// empty collections out of the serialized payload.
[DataMember(Name = "Scopes", EmitDefaultValue = false)]
private IList<ContextScope> m_scopes;
[DataMember(Name = "Steps", EmitDefaultValue = false)]
private IList<Step> m_steps;
}
}

View File

@@ -54,21 +54,6 @@ namespace GitHub.DistributedTask.Pipelines
}
}
/// <summary>
/// Gets the collection of secure files associated with the current job.
/// </summary>
public List<SecureFile> SecureFiles
{
get
{
// Lazily-created backing list (serialization elsewhere trims it back to null when empty).
if (m_secureFiles == null)
{
m_secureFiles = new List<SecureFile>();
}
return m_secureFiles;
}
}
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
@@ -86,11 +71,6 @@ namespace GitHub.DistributedTask.Pipelines
{
m_repositories = null;
}
if (m_secureFiles?.Count == 0)
{
m_secureFiles = null;
}
}
[DataMember(Name = "Containers", EmitDefaultValue = false)]
@@ -101,8 +81,5 @@ namespace GitHub.DistributedTask.Pipelines
[DataMember(Name = "Repositories", EmitDefaultValue = false)]
private List<RepositoryResource> m_repositories;
[DataMember(Name = "SecureFiles", EmitDefaultValue = false)]
private List<SecureFile> m_secureFiles;
}
}

View File

@@ -1,24 +0,0 @@
using System;
using System.ComponentModel;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.Pipelines.Runtime;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
    /// <summary>
    /// Named-value node used when evaluating graph-node conditions
    /// </summary>
    [EditorBrowsable(EditorBrowsableState.Never)]
    internal sealed class GraphConditionNamedValue<TInstance> : NamedValue where TInstance : IGraphNodeInstance
    {
        protected override Object EvaluateCore(
            EvaluationContext context,
            out ResultMemory resultMemory)
        {
            resultMemory = null;
            // The evaluation state is expected to be the graph execution context.
            // NOTE(review): an unexpected state type would surface here as a
            // NullReferenceException on the next line — confirm callers guarantee it.
            var graphContext = context.State as GraphExecutionContext<TInstance>;
            // Missing keys resolve to null rather than throwing.
            graphContext.Data.TryGetValue(Name, out var result);
            return result;
        }
    }
}

View File

@@ -1,13 +0,0 @@
using System;
using System.ComponentModel;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
    [EditorBrowsable(EditorBrowsableState.Never)]
    public interface IFileProvider
    {
        /// <summary>
        /// Returns the content of the file identified by <paramref name="path"/>.
        /// </summary>
        String GetFileContent(String path);

        /// <summary>
        /// Resolves <paramref name="path"/> to a usable path; presumably relative
        /// paths are resolved against <paramref name="defaultRoot"/> — confirm with
        /// implementations.
        /// </summary>
        String ResolvePath(String defaultRoot, String path);
    }

View File

@@ -1,59 +0,0 @@
using System;
using System.Collections.Generic;
using System.Globalization;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
internal sealed class JobDisplayNameBuilder
{
public JobDisplayNameBuilder(String jobFactoryDisplayName)
{
if (!String.IsNullOrEmpty(jobFactoryDisplayName))
{
m_jobFactoryDisplayName = jobFactoryDisplayName;
m_segments = new List<String>();
}
}
public void AppendSegment(String value)
{
if (String.IsNullOrEmpty(value) || m_segments == null)
{
return;
}
m_segments.Add(value);
}
public String Build()
{
if (String.IsNullOrEmpty(m_jobFactoryDisplayName))
{
return null;
}
var displayName = default(String);
if (m_segments.Count == 0)
{
displayName = m_jobFactoryDisplayName;
}
else
{
var joinedSegments = String.Join(", ", m_segments);
displayName = String.Format(CultureInfo.InvariantCulture, "{0} ({1})", m_jobFactoryDisplayName, joinedSegments);
}
const Int32 maxDisplayNameLength = 100;
if (displayName.Length > maxDisplayNameLength)
{
displayName = displayName.Substring(0, maxDisplayNameLength - 3) + "...";
}
m_segments.Clear();
return displayName;
}
private readonly String m_jobFactoryDisplayName;
private readonly List<String> m_segments;
}
}

View File

@@ -1,445 +0,0 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
    /// <summary>
    /// Expands a strategy "matrix" into individual strategy configurations by
    /// enumerating the cross product of the declared vectors, removing entries
    /// matched by the "exclude" sequence, and merging extras from matching
    /// "include" items.
    /// </summary>
    internal sealed class MatrixBuilder
    {
        internal MatrixBuilder(
            TemplateContext context,
            String jobFactoryDisplayName)
        {
            m_context = context;
            m_jobFactoryDisplayName = jobFactoryDisplayName;
        }

        // Registers one matrix vector (a named sequence of values).
        internal void AddVector(
            String name,
            SequenceToken vector)
        {
            m_vectors.Add(name, vector.ToContextData());
        }

        internal DictionaryContextData Vectors => m_vectors;

        // Records the raw "exclude" sequence; parsed lazily in Build().
        internal void Exclude(SequenceToken exclude)
        {
            m_excludeSequence = exclude;
        }

        // Records the raw "include" sequence; parsed lazily in Build().
        internal void Include(SequenceToken include)
        {
            m_includeSequence = include;
        }

        /// <summary>
        /// Yields one configuration per non-excluded cross-product entry. Each
        /// configuration gets a generated name/display name and a "matrix"
        /// context value (augmented with any matching include extras).
        /// </summary>
        internal IEnumerable<StrategyConfiguration> Build()
        {
            if (m_vectors.Count > 0)
            {
                // Parse includes/excludes
                var include = new MatrixInclude(m_context, m_vectors, m_includeSequence);
                var exclude = new MatrixExclude(m_context, m_vectors, m_excludeSequence);

                // Calculate the cross product size
                var productSize = 1;
                foreach (var vectorPair in m_vectors)
                {
                    // Overflow on absurdly large matrices throws rather than wraps
                    checked
                    {
                        var vector = vectorPair.Value.AssertArray("vector");
                        productSize *= vector.Count;
                    }
                }

                var nameBuilder = new ReferenceNameBuilder();
                var displayNameBuilder = new JobDisplayNameBuilder(m_jobFactoryDisplayName);

                // Cross product
                for (var productIndex = 0; productIndex < productSize; productIndex++)
                {
                    // Matrix: mixed-radix decomposition of productIndex selects one
                    // value from each vector
                    var matrix = new DictionaryContextData();
                    var blockSize = productSize;
                    foreach (var vectorPair in m_vectors)
                    {
                        var vectorName = vectorPair.Key;
                        var vector = vectorPair.Value.AssertArray("vector");
                        blockSize = blockSize / vector.Count;
                        var vectorIndex = (productIndex / blockSize) % vector.Count;
                        matrix.Add(vectorName, vector[vectorIndex]);
                    }

                    // Exclude
                    if (exclude.Match(matrix))
                    {
                        continue;
                    }

                    // New configuration
                    var configuration = new StrategyConfiguration();
                    m_context.Memory.AddBytes(TemplateMemory.MinObjectSize);

                    // Gather segments for name and display name (scalar values only)
                    foreach (var matrixData in matrix.Traverse(omitKeys: true))
                    {
                        var segment = default(String);
                        switch (matrixData?.Type)
                        {
                            case PipelineContextDataType.Boolean:
                            case PipelineContextDataType.Number:
                            case PipelineContextDataType.String:
                                segment = matrixData.ToString();
                                break;
                        }
                        if (!String.IsNullOrEmpty(segment))
                        {
                            // Name segment
                            nameBuilder.AppendSegment(segment);
                            // Display name segment
                            displayNameBuilder.AppendSegment(segment);
                        }
                    }

                    // Name
                    configuration.Name = nameBuilder.Build();
                    m_context.Memory.AddBytes(configuration.Name);

                    // Display name
                    configuration.DisplayName = displayNameBuilder.Build();
                    m_context.Memory.AddBytes(configuration.DisplayName);

                    // Include
                    if (include.Match(matrix, out var extra))
                    {
                        matrix.Add(extra);
                    }

                    // Matrix context
                    configuration.ContextData.Add(PipelineTemplateConstants.Matrix, matrix);
                    m_context.Memory.AddBytes(PipelineTemplateConstants.Matrix);
                    m_context.Memory.AddBytes(matrix, traverse: true);

                    // Add configuration
                    yield return configuration;
                }
            }
        }

        /// <summary>
        /// Parsed form of the "include" sequence: each item is split into filter
        /// keys (keys that name a declared vector) and extra values (keys that
        /// do not).
        /// </summary>
        private sealed class MatrixInclude
        {
            public MatrixInclude(
                TemplateContext context,
                DictionaryContextData vectors,
                SequenceToken includeSequence)
            {
                // Convert to excludes sets
                if (includeSequence?.Count > 0)
                {
                    foreach (var includeItem in includeSequence)
                    {
                        var includeMapping = includeItem.AssertMapping("matrix includes item");

                        // Distinguish filters versus extra
                        var filter = new MappingToken(null, null, null);
                        var extra = new DictionaryContextData();
                        foreach (var includePair in includeMapping)
                        {
                            var includeKeyLiteral = includePair.Key.AssertString("matrix include item key");
                            if (vectors.ContainsKey(includeKeyLiteral.Value))
                            {
                                filter.Add(includeKeyLiteral, includePair.Value);
                            }
                            else
                            {
                                extra.Add(includeKeyLiteral.Value, includePair.Value.ToContextData());
                            }
                        }

                        // At least one filter
                        if (filter.Count == 0)
                        {
                            context.Error(includeMapping, $"Matrix include mapping does not contain any filters");
                            continue;
                        }

                        // At least one extra
                        if (extra.Count == 0)
                        {
                            context.Error(includeMapping, $"Matrix include mapping does not contain any extra values to include");
                            continue;
                        }

                        // Add filter
                        m_filters.Add(new MatrixIncludeFilter(filter, extra));
                    }
                }
            }

            // Merges the extras of every matching include filter (later filters
            // overwrite earlier keys); returns true when any filter matched.
            public Boolean Match(
                DictionaryContextData matrix,
                out DictionaryContextData extra)
            {
                extra = default(DictionaryContextData);
                foreach (var filter in m_filters)
                {
                    if (filter.Match(matrix, out var items))
                    {
                        if (extra == null)
                        {
                            extra = new DictionaryContextData();
                        }
                        foreach (var pair in items)
                        {
                            extra[pair.Key] = pair.Value;
                        }
                    }
                }
                return extra != null;
            }

            private readonly List<MatrixIncludeFilter> m_filters = new List<MatrixIncludeFilter>();
        }

        // A single include filter plus the extra values to merge when it matches.
        private sealed class MatrixIncludeFilter : MatrixFilter
        {
            public MatrixIncludeFilter(
                MappingToken filter,
                DictionaryContextData extra)
                : base(filter)
            {
                m_extra = extra;
            }

            public Boolean Match(
                DictionaryContextData matrix,
                out DictionaryContextData extra)
            {
                if (base.Match(matrix))
                {
                    extra = m_extra;
                    return true;
                }
                extra = null;
                return false;
            }

            private readonly DictionaryContextData m_extra;
        }

        // Parsed form of the "exclude" sequence.
        private sealed class MatrixExclude
        {
            public MatrixExclude(
                TemplateContext context,
                DictionaryContextData vectors,
                SequenceToken excludeSequence)
            {
                // Convert to excludes sets
                if (excludeSequence?.Count > 0)
                {
                    foreach (var excludeItem in excludeSequence)
                    {
                        var excludeMapping = excludeItem.AssertMapping("matrix excludes item");

                        // Check empty
                        if (excludeMapping.Count == 0)
                        {
                            context.Error(excludeMapping, $"Matrix exclude filter must not be empty");
                            continue;
                        }

                        // Validate first-level keys
                        foreach (var excludePair in excludeMapping)
                        {
                            var excludeKey = excludePair.Key.AssertString("matrix excludes item key");
                            if (!vectors.ContainsKey(excludeKey.Value))
                            {
                                context.Error(excludeKey, $"Matrix exclude key '{excludeKey.Value}' does not match any key within the matrix");
                                continue;
                            }
                        }

                        // Add filter
                        m_filters.Add(new MatrixExcludeFilter(excludeMapping));
                    }
                }
            }

            // True when any exclude filter matches the candidate matrix entry.
            public Boolean Match(DictionaryContextData matrix)
            {
                foreach (var filter in m_filters)
                {
                    if (filter.Match(matrix))
                    {
                        return true;
                    }
                }
                return false;
            }

            private readonly List<MatrixExcludeFilter> m_filters = new List<MatrixExcludeFilter>();
        }

        private sealed class MatrixExcludeFilter : MatrixFilter
        {
            public MatrixExcludeFilter(MappingToken filter)
                : base(filter)
            {
            }

            // Re-exposes the protected base match publicly.
            public new Boolean Match(DictionaryContextData matrix)
            {
                return base.Match(matrix);
            }
        }

        /// <summary>
        /// Compiles a (possibly nested) filter mapping into a set of equality
        /// expressions over the "matrix" named value; a candidate matrix matches
        /// when every expression evaluates truthy.
        /// </summary>
        private abstract class MatrixFilter
        {
            protected MatrixFilter(MappingToken matrixFilter)
            {
                // Iterative depth-first walk of the filter mapping; each leaf
                // literal becomes one equality expression.
                var state = new MappingState(null, matrixFilter);
                while (state != null)
                {
                    if (state.MoveNext())
                    {
                        var value = state.Mapping[state.Index].Value;
                        if (value is LiteralToken literal)
                        {
                            AddExpression(state, literal);
                        }
                        else
                        {
                            var mapping = state.Mapping[state.Index].Value.AssertMapping("matrix filter");
                            state = new MappingState(state, mapping);
                        }
                    }
                    else
                    {
                        state = state.Parent;
                    }
                }
            }

            protected Boolean Match(DictionaryContextData matrix)
            {
                if (matrix.Count == 0)
                {
                    throw new InvalidOperationException("Matrix filter cannot be empty");
                }
                foreach (var expression in m_expressions)
                {
                    // The matrix dictionary is passed as the evaluation state,
                    // which MatrixNamedValue resolves as the "matrix" value.
                    var result = expression.Evaluate(null, null, matrix, null);
                    if (result.IsFalsy)
                    {
                        return false;
                    }
                }
                return true;
            }

            // Builds "matrix['key'][...] == <literal>" and parses it into an expression tree.
            private void AddExpression(
                MappingState state,
                LiteralToken literal)
            {
                var expressionLiteral = default(String);
                switch (literal.Type)
                {
                    case TokenType.Null:
                        expressionLiteral = ExpressionConstants.Null;
                        break;
                    case TokenType.Boolean:
                        var booleanToken = literal as BooleanToken;
                        expressionLiteral = booleanToken.Value ? ExpressionConstants.True : ExpressionConstants.False;
                        break;
                    case TokenType.Number:
                        var numberToken = literal as NumberToken;
                        expressionLiteral = String.Format(CultureInfo.InvariantCulture, ExpressionConstants.NumberFormat, numberToken.Value);
                        break;
                    case TokenType.String:
                        var stringToken = literal as StringToken;
                        expressionLiteral = $"'{ExpressionUtility.StringEscape(stringToken.Value)}'";
                        break;
                    default:
                        throw new NotSupportedException($"Unexpected literal type '{literal.Type}'");
                }
                var str = $"{state.Path} == {expressionLiteral}";
                var parser = new ExpressionParser();
                var expression = parser.CreateTree(str, null, s_matrixFilterNamedValues, null);
                m_expressions.Add(expression);
            }

            private static readonly INamedValueInfo[] s_matrixFilterNamedValues = new INamedValueInfo[]
            {
                new NamedValueInfo<MatrixNamedValue>(PipelineTemplateConstants.Matrix),
            };

            private readonly List<IExpressionNode> m_expressions = new List<IExpressionNode>();
        }

        // Cursor over one level of a filter mapping during the depth-first walk.
        private sealed class MappingState
        {
            public MappingState(
                MappingState parent,
                MappingToken mapping)
            {
                Parent = parent;
                Mapping = mapping;
                Index = -1;
            }

            // Advances to the next pair and updates Path, e.g. matrix['os']['version'].
            public Boolean MoveNext()
            {
                if (++Index < Mapping.Count)
                {
                    var keyLiteral = Mapping[Index].Key.AssertString("matrix filter key");
                    var parentPath = Parent?.Path ?? PipelineTemplateConstants.Matrix;
                    Path = $"{parentPath}['{ExpressionUtility.StringEscape(keyLiteral.Value)}']";
                    return true;
                }
                else
                {
                    return false;
                }
            }

            public MappingState Parent;
            public MappingToken Mapping;
            public Int32 Index;
            public String Path;
        }

        // Resolves the "matrix" named value to the evaluation state itself.
        private sealed class MatrixNamedValue : NamedValue
        {
            protected override Object EvaluateCore(
                EvaluationContext context,
                out ResultMemory resultMemory)
            {
                resultMemory = null;
                return context.State;
            }
        }

        private readonly TemplateContext m_context;
        private readonly String m_jobFactoryDisplayName;
        private readonly DictionaryContextData m_vectors = new DictionaryContextData();
        private SequenceToken m_excludeSequence;
        private SequenceToken m_includeSequence;
    }
}

View File

@@ -1,45 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Reflection;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class ParseOptions
{
public ParseOptions()
{
}
internal ParseOptions(ParseOptions copy)
{
MaxFiles = copy.MaxFiles;
MaxFileSize = copy.MaxFileSize;
MaxResultSize = copy.MaxResultSize;
}
public Int32 MaxDepth => 50;
/// <summary>
/// Gets the maximum error message length before the message will be truncated.
/// </summary>
public Int32 MaxErrorMessageLength => 500;
/// <summary>
/// Gets the maximum number of errors that can be recorded when parsing a pipeline.
/// </summary>
public Int32 MaxErrors => 10;
/// <summary>
/// Gets or sets the maximum number of files that can be loaded when parsing a pipeline. Zero or less is treated as infinite.
/// </summary>
public Int32 MaxFiles { get; set; } = 50;
public Int32 MaxFileSize { get; set; } = 1024 * 1024; // 1 mb
public Int32 MaxParseEvents => 1000000; // 1 million
public Int32 MaxResultSize { get; set; } = 10 * 1024 * 1024; // 10 mb
}
}

View File

@@ -1,30 +0,0 @@
using System;
using System.IO;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
internal sealed class ParseResult
{
public TemplateContext Context { get; set; }
public TemplateToken Value { get; set; }
public String ToYaml()
{
if (Value == null)
{
return null;
}
// Serialize
using (var stringWriter = new StringWriter())
{
TemplateWriter.Write(new YamlObjectWriter(stringWriter), Value);
stringWriter.Flush();
return stringWriter.ToString();
}
}
}
}

View File

@@ -16,180 +16,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
internal static class PipelineTemplateConverter
{
        /// <summary>
        /// Converts the root template token into a PipelineTemplate containing a
        /// single default stage. Any exception raised during conversion is added
        /// to the context errors, and all accumulated context errors are copied
        /// onto the returned template — this method does not throw.
        /// </summary>
        internal static PipelineTemplate ConvertToPipeline(
            TemplateContext context,
            RepositoryResource self,
            TemplateToken pipeline)
        {
            var result = new PipelineTemplate();
            result.Resources.Repositories.Add(self);
            var defaultStage = new Stage
            {
                Name = PipelineConstants.DefaultJobName,
            };
            result.Stages.Add(defaultStage);
            try
            {
                // Bail early when there is nothing to convert or earlier errors exist
                if (pipeline == null || context.Errors.Count > 0)
                {
                    return result;
                }
                var pipelineMapping = pipeline.AssertMapping("root");
                foreach (var pipelinePair in pipelineMapping)
                {
                    var pipelineKey = pipelinePair.Key.AssertString("root key");
                    switch (pipelineKey.Value)
                    {
                        // "on" and "name" are accepted but not used by this converter
                        case PipelineTemplateConstants.On:
                            break;
                        case PipelineTemplateConstants.Name:
                            break;
                        case PipelineTemplateConstants.Env:
                            result.EnvironmentVariables = pipelinePair.Value;
                            break;
                        case PipelineTemplateConstants.Jobs:
                            defaultStage.Phases.AddRange(ConvertToJobFactories(context, result.Resources, pipelinePair.Value));
                            break;
                        default:
                            pipelineKey.AssertUnexpectedValue("root key"); // throws
                            break;
                    }
                }
            }
            catch (Exception ex)
            {
                context.Errors.Add(ex);
            }
            finally
            {
                // Surface every recorded context error on the template itself
                if (context.Errors.Count > 0)
                {
                    foreach (var error in context.Errors)
                    {
                        result.Errors.Add(new PipelineValidationError(error.Code, error.Message));
                    }
                }
            }
            return result;
        }
internal static String ConvertToJobDisplayName(
TemplateContext context,
TemplateToken displayName,
Boolean allowExpressions = false)
{
var result = default(String);
// Expression
if (allowExpressions && displayName is ExpressionToken)
{
return result;
}
// String
var displayNameString = displayName.AssertString($"job {PipelineTemplateConstants.Name}");
result = displayNameString.Value;
return result;
}
        /// <summary>
        /// Converts the "runs-on" token into an agent-pool target. Returns an
        /// empty target when the token is still an unexpanded expression (and
        /// expressions are allowed). A string value selects the "GitHub Actions"
        /// pool with the value recorded as the VM image; a mapping may name a
        /// pool explicitly via its "pool" key.
        /// </summary>
        internal static PhaseTarget ConvertToJobTarget(
            TemplateContext context,
            TemplateToken runsOn,
            Boolean allowExpressions = false)
        {
            var result = new AgentPoolTarget();

            // Expression
            if (allowExpressions && runsOn is ExpressionToken)
            {
                return result;
            }

            // String
            if (runsOn is StringToken runsOnString)
            {
                result.Pool = new AgentPoolReference { Name = "GitHub Actions" };
                result.AgentSpecification = new JObject
                {
                    { PipelineTemplateConstants.VmImage, runsOnString.Value }
                };
            }
            // Mapping
            else
            {
                var runsOnMapping = runsOn.AssertMapping($"job {PipelineTemplateConstants.RunsOn}");
                foreach (var runsOnProperty in runsOnMapping)
                {
                    // Expression
                    if (allowExpressions && runsOnProperty.Key is ExpressionToken)
                    {
                        continue;
                    }

                    // String
                    var propertyName = runsOnProperty.Key.AssertString($"job {PipelineTemplateConstants.RunsOn} key");
                    switch (propertyName.Value)
                    {
                        case PipelineTemplateConstants.Pool:
                            // Expression
                            if (allowExpressions && runsOnProperty.Value is ExpressionToken)
                            {
                                continue;
                            }

                            // Literal
                            var pool = runsOnProperty.Value.AssertString($"job {PipelineTemplateConstants.RunsOn} key");
                            result.Pool = new AgentPoolReference { Name = pool.Value };
                            break;
                        default:
                            propertyName.AssertUnexpectedValue($"job {PipelineTemplateConstants.RunsOn} key"); // throws
                            break;
                    }
                }
            }
            return result;
        }
internal static Int32? ConvertToJobTimeout(
TemplateContext context,
TemplateToken token,
Boolean allowExpressions = false)
{
if (allowExpressions && token is ExpressionToken)
{
return null;
}
var numberToken = token.AssertNumber($"job {PipelineTemplateConstants.TimeoutMinutes}");
return (Int32)numberToken.Value;
}
internal static Int32? ConvertToJobCancelTimeout(
TemplateContext context,
TemplateToken token,
Boolean allowExpressions = false)
{
if (allowExpressions && token is ExpressionToken)
{
return null;
}
var numberToken = token.AssertNumber($"job {PipelineTemplateConstants.CancelTimeoutMinutes}");
return (Int32)numberToken.Value;
}
internal static Boolean? ConvertToStepContinueOnError(
TemplateContext context,
TemplateToken token,
@@ -315,203 +141,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return (Int32)numberToken.Value;
}
internal static StrategyResult ConvertToStrategy(
TemplateContext context,
TemplateToken token,
String jobFactoryDisplayName,
Boolean allowExpressions = false)
{
var result = new StrategyResult();
// Expression
if (allowExpressions && token is ExpressionToken)
{
return result;
}
var strategyMapping = token.AssertMapping(PipelineTemplateConstants.Strategy);
var matrixBuilder = default(MatrixBuilder);
var hasExpressions = false;
foreach (var strategyPair in strategyMapping)
{
// Expression key
if (allowExpressions && strategyPair.Key is ExpressionToken)
{
hasExpressions = true;
continue;
}
// Literal key
var strategyKey = strategyPair.Key.AssertString("strategy key");
switch (strategyKey.Value)
{
// Fail-Fast
case PipelineTemplateConstants.FailFast:
if (allowExpressions && strategyPair.Value is ExpressionToken)
{
hasExpressions = true;
continue;
}
var failFastBooleanToken = strategyPair.Value.AssertBoolean($"strategy {PipelineTemplateConstants.FailFast}");
result.FailFast = failFastBooleanToken.Value;
break;
// Max-Parallel
case PipelineTemplateConstants.MaxParallel:
if (allowExpressions && strategyPair.Value is ExpressionToken)
{
hasExpressions = true;
continue;
}
var maxParallelNumberToken = strategyPair.Value.AssertNumber($"strategy {PipelineTemplateConstants.MaxParallel}");
result.MaxParallel = (Int32)maxParallelNumberToken.Value;
break;
// Matrix
case PipelineTemplateConstants.Matrix:
// Expression
if (allowExpressions && strategyPair.Value is ExpressionToken)
{
hasExpressions = true;
continue;
}
var matrix = strategyPair.Value.AssertMapping("matrix");
hasExpressions = hasExpressions || matrix.Traverse().Any(x => x is ExpressionToken);
matrixBuilder = new MatrixBuilder(context, jobFactoryDisplayName);
var hasVector = false;
foreach (var matrixPair in matrix)
{
// Expression key
if (allowExpressions && matrixPair.Key is ExpressionToken)
{
hasVector = true; // For validation, treat as if a vector is defined
continue;
}
var matrixKey = matrixPair.Key.AssertString("matrix key");
switch (matrixKey.Value)
{
case PipelineTemplateConstants.Include:
if (allowExpressions && matrixPair.Value is ExpressionToken)
{
continue;
}
var includeSequence = matrixPair.Value.AssertSequence("matrix includes");
matrixBuilder.Include(includeSequence);
break;
case PipelineTemplateConstants.Exclude:
if (allowExpressions && matrixPair.Value is ExpressionToken)
{
continue;
}
var excludeSequence = matrixPair.Value.AssertSequence("matrix excludes");
matrixBuilder.Exclude(excludeSequence);
break;
default:
hasVector = true;
if (allowExpressions && matrixPair.Value is ExpressionToken)
{
continue;
}
var vectorName = matrixKey.Value;
var vectorSequence = matrixPair.Value.AssertSequence("matrix vector value");
if (vectorSequence.Count == 0)
{
context.Error(vectorSequence, $"Matrix vector '{vectorName}' does not contain any values");
}
else
{
matrixBuilder.AddVector(vectorName, vectorSequence);
}
break;
}
}
if (!hasVector)
{
context.Error(matrix, $"Matrix must defined at least one vector");
}
break;
default:
strategyKey.AssertUnexpectedValue("strategy key"); // throws
break;
}
}
if (hasExpressions)
{
return result;
}
if (matrixBuilder != null)
{
result.Configurations.AddRange(matrixBuilder.Build());
}
for (var i = 0; i < result.Configurations.Count; i++)
{
var configuration = result.Configurations[i];
var strategy = new DictionaryContextData()
{
{
"fail-fast",
new BooleanContextData(result.FailFast)
},
{
"job-index",
new NumberContextData(i)
},
{
"job-total",
new NumberContextData(result.Configurations.Count)
}
};
if (result.MaxParallel > 0)
{
strategy.Add(
"max-parallel",
new NumberContextData(result.MaxParallel)
);
}
else
{
strategy.Add(
"max-parallel",
new NumberContextData(result.Configurations.Count)
);
}
configuration.ContextData.Add(PipelineTemplateConstants.Strategy, strategy);
context.Memory.AddBytes(PipelineTemplateConstants.Strategy);
context.Memory.AddBytes(strategy, traverse: true);
if (!configuration.ContextData.ContainsKey(PipelineTemplateConstants.Matrix))
{
configuration.ContextData.Add(PipelineTemplateConstants.Matrix, null);
context.Memory.AddBytes(PipelineTemplateConstants.Matrix);
}
}
return result;
}
internal static JobContainer ConvertToJobContainer(
TemplateContext context,
TemplateToken value,
@@ -616,532 +245,5 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result;
}
        /// <summary>
        /// Converts the "jobs" mapping into one JobFactory per entry. Tokens that
        /// may still contain expressions are validated early where possible and
        /// stored as cloned templates for later expansion.
        /// </summary>
        private static IEnumerable<PhaseNode> ConvertToJobFactories(
            TemplateContext context,
            PipelineResources resources,
            TemplateToken workflow)
        {
            var jobsMapping = workflow.AssertMapping(PipelineTemplateConstants.Jobs);
            foreach (var jobsPair in jobsMapping)
            {
                var jobNameToken = jobsPair.Key.AssertString($"{PipelineTemplateConstants.Jobs} key");
                if (!NameValidation.IsValid(jobNameToken.Value, true))
                {
                    context.Error(jobNameToken, $"Job name {jobNameToken.Value} is invalid. Names must start with a letter or '_' and contain only alphanumeric characters, '-', or '_'");
                }
                var result = new JobFactory
                {
                    Name = jobNameToken.Value
                };
                var jobFactoryDefinition = jobsPair.Value.AssertMapping($"{PipelineTemplateConstants.Jobs} value");
                foreach (var jobFactoryProperty in jobFactoryDefinition)
                {
                    var propertyName = jobFactoryProperty.Key.AssertString($"job property name");
                    switch (propertyName.Value)
                    {
                        case PipelineTemplateConstants.ContinueOnError:
                            var continueOnErrorBooleanToken = jobFactoryProperty.Value.AssertBoolean($"job {PipelineTemplateConstants.ContinueOnError}");
                            result.ContinueOnError = continueOnErrorBooleanToken.Value;
                            break;
                        case PipelineTemplateConstants.If:
                            var ifCondition = jobFactoryProperty.Value.AssertString($"job {PipelineTemplateConstants.If}");
                            result.Condition = ConvertToIfCondition(context, ifCondition, true, true);
                            break;
                        case PipelineTemplateConstants.Name:
                            var displayName = jobFactoryProperty.Value.AssertScalar($"job {PipelineTemplateConstants.Name}");
                            ConvertToJobDisplayName(context, displayName, allowExpressions: true); // Validate early if possible
                            // Literal names are resolved now; expression names later
                            if (displayName is StringToken)
                            {
                                result.DisplayName = displayName.ToString();
                            }
                            else
                            {
                                result.JobDisplayName = displayName.Clone(true) as ExpressionToken;
                            }
                            break;
                        case PipelineTemplateConstants.Needs:
                            // "needs" accepts a single string or a sequence of strings
                            if (jobFactoryProperty.Value is StringToken needsLiteral)
                            {
                                result.DependsOn.Add(needsLiteral.Value);
                            }
                            else
                            {
                                var needs = jobFactoryProperty.Value.AssertSequence($"job {PipelineTemplateConstants.Needs}");
                                foreach (var needsItem in needs)
                                {
                                    var need = needsItem.AssertString($"job {PipelineTemplateConstants.Needs} item");
                                    result.DependsOn.Add(need.Value);
                                }
                            }
                            break;
                        case PipelineTemplateConstants.RunsOn:
                            ConvertToJobTarget(context, jobFactoryProperty.Value, allowExpressions: true); // Validate early if possible
                            result.JobTarget = jobFactoryProperty.Value.Clone(true);
                            break;
                        case PipelineTemplateConstants.Scopes:
                            foreach (var scope in ConvertToScopes(context, jobFactoryProperty.Value))
                            {
                                result.Scopes.Add(scope);
                            }
                            break;
                        case PipelineTemplateConstants.Steps:
                            result.Steps.AddRange(ConvertToSteps(context, jobFactoryProperty.Value));
                            break;
                        case PipelineTemplateConstants.Strategy:
                            ConvertToStrategy(context, jobFactoryProperty.Value, null, allowExpressions: true); // Validate early if possible
                            result.Strategy = jobFactoryProperty.Value.Clone(true);
                            break;
                        case PipelineTemplateConstants.TimeoutMinutes:
                            ConvertToJobTimeout(context, jobFactoryProperty.Value, allowExpressions: true); // Validate early if possible
                            result.JobTimeout = jobFactoryProperty.Value.Clone(true) as ScalarToken;
                            break;
                        case PipelineTemplateConstants.CancelTimeoutMinutes:
                            ConvertToJobCancelTimeout(context, jobFactoryProperty.Value, allowExpressions: true); // Validate early if possible
                            result.JobCancelTimeout = jobFactoryProperty.Value.Clone(true) as ScalarToken;
                            break;
                        case PipelineTemplateConstants.Container:
                            ConvertToJobContainer(context, jobFactoryProperty.Value, allowExpressions: true);
                            result.JobContainer = jobFactoryProperty.Value.Clone(true);
                            break;
                        case PipelineTemplateConstants.Services:
                            ConvertToJobServiceContainers(context, jobFactoryProperty.Value, allowExpressions: true);
                            result.JobServiceContainers = jobFactoryProperty.Value.Clone(true);
                            break;
                        case PipelineTemplateConstants.Env:
                            result.EnvironmentVariables = jobFactoryProperty.Value.Clone(true);
                            break;
                        default:
                            propertyName.AssertUnexpectedValue("job key"); // throws
                            break;
                    }
                }
                // todo: Move "required" support into schema validation
                if (result.JobTarget == null)
                {
                    context.Error(jobFactoryDefinition, $"The '{PipelineTemplateConstants.RunsOn}' property is required");
                }
                if (String.IsNullOrEmpty(result.DisplayName))
                {
                    result.DisplayName = result.Name;
                }
                // Jobs using template scopes are bracketed with loud warning steps
                if (result.Scopes.Count > 0)
                {
                    result.Steps.Insert(
                        0,
                        new ActionStep
                        {
                            Reference = new ScriptReference(),
                            DisplayName = "WARNING: TEMPLATES ARE HIGHLY EXPERIMENTAL",
                            Inputs = new MappingToken(null, null, null)
                            {
                                {
                                    new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Script),
                                    new StringToken(null, null, null, "echo WARNING: TEMPLATES ARE HIGHLY EXPERIMENTAL")
                                }
                            }
                        });
                    result.Steps.Add(
                        new ActionStep
                        {
                            Reference = new ScriptReference(),
                            DisplayName = "WARNING: TEMPLATES ARE HIGHLY EXPERIMENTAL",
                            Inputs = new MappingToken(null, null, null)
                            {
                                {
                                    new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Script),
                                    new StringToken(null, null, null, "echo WARNING: TEMPLATES ARE HIGHLY EXPERIMENTAL")
                                }
                            }
                        });
                }
                yield return result;
            }
        }
        /// <summary>
        /// Converts the "scopes" sequence into ContextScope objects (a name plus
        /// optional inputs/outputs mappings).
        /// </summary>
        private static IEnumerable<ContextScope> ConvertToScopes(
            TemplateContext context,
            TemplateToken scopes)
        {
            var scopesSequence = scopes.AssertSequence($"job {PipelineTemplateConstants.Scopes}");
            foreach (var scopesItem in scopesSequence)
            {
                var result = new ContextScope();
                var scope = scopesItem.AssertMapping($"{PipelineTemplateConstants.Scopes} item");
                foreach (var scopeProperty in scope)
                {
                    var propertyName = scopeProperty.Key.AssertString($"{PipelineTemplateConstants.Scopes} item key");
                    switch (propertyName.Value)
                    {
                        case PipelineTemplateConstants.Name:
                            var nameLiteral = scopeProperty.Value.AssertString($"{PipelineTemplateConstants.Scopes} item {PipelineTemplateConstants.Name}");
                            result.Name = nameLiteral.Value;
                            break;
                        case PipelineTemplateConstants.Inputs:
                            result.Inputs = scopeProperty.Value.AssertMapping($"{PipelineTemplateConstants.Scopes} item {PipelineTemplateConstants.Inputs}");
                            break;
                        case PipelineTemplateConstants.Outputs:
                            result.Outputs = scopeProperty.Value.AssertMapping($"{PipelineTemplateConstants.Scopes} item {PipelineTemplateConstants.Outputs}");
                            break;
                        // NOTE(review): unrecognized scope keys are silently ignored
                        // here (other converters call AssertUnexpectedValue) — confirm
                        // this is intentional
                    }
                }
                yield return result;
            }
        }
private static List<Step> ConvertToSteps(
TemplateContext context,
TemplateToken steps)
{
var stepsSequence = steps.AssertSequence($"job {PipelineTemplateConstants.Steps}");
var result = new List<Step>();
foreach (var stepsItem in stepsSequence)
{
var step = ConvertToStep(context, stepsItem);
if (step != null) // step = null means we are hitting error during step conversion, there should be an error in context.errors
{
if (step.Enabled)
{
result.Add(step);
}
}
}
return result;
}
private static ActionStep ConvertToStep(
TemplateContext context,
TemplateToken stepsItem)
{
var step = stepsItem.AssertMapping($"{PipelineTemplateConstants.Steps} item");
var continueOnError = default(ScalarToken);
var env = default(TemplateToken);
var id = default(StringToken);
var ifCondition = default(String);
var ifToken = default(StringToken);
var name = default(ScalarToken);
var run = default(ScalarToken);
var scope = default(StringToken);
var timeoutMinutes = default(ScalarToken);
var uses = default(StringToken);
var with = default(TemplateToken);
var workingDir = default(ScalarToken);
var path = default(ScalarToken);
var clean = default(ScalarToken);
var fetchDepth = default(ScalarToken);
var lfs = default(ScalarToken);
var submodules = default(ScalarToken);
var shell = default(ScalarToken);
foreach (var stepProperty in step)
{
var propertyName = stepProperty.Key.AssertString($"{PipelineTemplateConstants.Steps} item key");
switch (propertyName.Value)
{
case PipelineTemplateConstants.Clean:
clean = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Clean}");
break;
case PipelineTemplateConstants.ContinueOnError:
ConvertToStepContinueOnError(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
continueOnError = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} {PipelineTemplateConstants.ContinueOnError}");
break;
case PipelineTemplateConstants.Env:
ConvertToStepEnvironment(context, stepProperty.Value, StringComparer.Ordinal, allowExpressions: true); // Validate early if possible
env = stepProperty.Value;
break;
case PipelineTemplateConstants.FetchDepth:
fetchDepth = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.FetchDepth}");
break;
case PipelineTemplateConstants.Id:
id = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Id}");
if (!NameValidation.IsValid(id.Value, true))
{
context.Error(id, $"Step id {id.Value} is invalid. Ids must start with a letter or '_' and contain only alphanumeric characters, '-', or '_'");
}
break;
case PipelineTemplateConstants.If:
ifToken = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.If}");
break;
case PipelineTemplateConstants.Lfs:
lfs = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Lfs}");
break;
case PipelineTemplateConstants.Name:
name = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Name}");
break;
case PipelineTemplateConstants.Path:
path = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Path}");
break;
case PipelineTemplateConstants.Run:
run = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Run}");
break;
case PipelineTemplateConstants.Shell:
shell = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Shell}");
break;
case PipelineTemplateConstants.Scope:
scope = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Scope}");
break;
case PipelineTemplateConstants.Submodules:
submodules = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Submodules}");
break;
case PipelineTemplateConstants.TimeoutMinutes:
ConvertToStepTimeout(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
timeoutMinutes = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.TimeoutMinutes}");
break;
case PipelineTemplateConstants.Uses:
uses = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Uses}");
break;
case PipelineTemplateConstants.With:
ConvertToStepInputs(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
with = stepProperty.Value;
break;
case PipelineTemplateConstants.WorkingDirectory:
workingDir = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.WorkingDirectory}");
break;
default:
propertyName.AssertUnexpectedValue($"{PipelineTemplateConstants.Steps} item key"); // throws
break;
}
}
// Fixup the if-condition
var isDefaultScope = String.IsNullOrEmpty(scope?.Value);
ifCondition = ConvertToIfCondition(context, ifToken, false, isDefaultScope);
if (run != null)
{
var result = new ActionStep
{
ScopeName = scope?.Value,
ContextName = id?.Value,
ContinueOnError = continueOnError?.Clone(true) as ScalarToken,
DisplayNameToken = name?.Clone(true) as ScalarToken,
Condition = ifCondition,
TimeoutInMinutes = timeoutMinutes?.Clone(true) as ScalarToken,
Environment = env?.Clone(true),
Reference = new ScriptReference(),
};
var inputs = new MappingToken(null, null, null);
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Script), run.Clone(true));
if (workingDir != null)
{
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.WorkingDirectory), workingDir.Clone(true));
}
if (shell != null)
{
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Shell), shell.Clone(true));
}
result.Inputs = inputs;
return result;
}
else if (uses != null)
{
var result = new ActionStep
{
ScopeName = scope?.Value,
ContextName = id?.Value,
ContinueOnError = continueOnError?.Clone(true) as ScalarToken,
DisplayNameToken = name?.Clone(true) as ScalarToken,
Condition = ifCondition,
TimeoutInMinutes = timeoutMinutes?.Clone(true) as ScalarToken,
Inputs = with,
Environment = env,
};
if (uses.Value.StartsWith("docker://", StringComparison.Ordinal))
{
var image = uses.Value.Substring("docker://".Length);
result.Reference = new ContainerRegistryReference { Image = image };
}
else if (uses.Value.StartsWith("./") || uses.Value.StartsWith(".\\"))
{
result.Reference = new RepositoryPathReference
{
RepositoryType = PipelineConstants.SelfAlias,
Path = uses.Value
};
}
else
{
var usesSegments = uses.Value.Split('@');
var pathSegments = usesSegments[0].Split(new[] { '/', '\\' }, StringSplitOptions.RemoveEmptyEntries);
var gitRef = usesSegments.Length == 2 ? usesSegments[1] : String.Empty;
if (usesSegments.Length != 2 ||
pathSegments.Length < 2 ||
String.IsNullOrEmpty(pathSegments[0]) ||
String.IsNullOrEmpty(pathSegments[1]) ||
String.IsNullOrEmpty(gitRef))
{
// todo: loc
context.Error(uses, $"Expected format {{org}}/{{repo}}[/path]@ref. Actual '{uses.Value}'");
}
else
{
var repositoryName = $"{pathSegments[0]}/{pathSegments[1]}";
var directoryPath = pathSegments.Length > 2 ? String.Join("/", pathSegments.Skip(2)) : String.Empty;
result.Reference = new RepositoryPathReference
{
RepositoryType = RepositoryTypes.GitHub,
Name = repositoryName,
Ref = gitRef,
Path = directoryPath,
};
}
}
return result;
}
else
{
// todo: build a "required" concept into the parser
context.Error(step, $"Either '{PipelineTemplateConstants.Uses}' or '{PipelineTemplateConstants.Run}' is required");
return null;
}
}
private static String ConvertToIfCondition(
    TemplateContext context,
    StringToken ifCondition,
    Boolean isJob,
    Boolean isDefaultScope)
{
    // An omitted or blank condition defaults to success()
    if (String.IsNullOrWhiteSpace(ifCondition?.Value))
    {
        return $"{PipelineTemplateConstants.Success}()";
    }

    var expression = ifCondition.Value;

    // Pick the named values and functions that are legal for this context
    INamedValueInfo[] namedValues;
    IFunctionInfo[] functions;
    if (isJob)
    {
        namedValues = s_jobIfNamedValues;
        functions = PhaseCondition.FunctionInfo;
    }
    else
    {
        namedValues = isDefaultScope ? s_stepNamedValues : s_stepInTemplateNamedValues;
        functions = s_stepConditionFunctions;
    }

    // Parse the condition up front so authoring errors surface during conversion
    ExpressionNode root;
    try
    {
        root = new ExpressionParser().CreateTree(expression, null, namedValues, functions) as ExpressionNode;
    }
    catch (Exception ex)
    {
        context.Error(ifCondition, ex);
        return null;
    }

    if (root == null)
    {
        return $"{PipelineTemplateConstants.Success}()";
    }

    // Determine whether any status function appears anywhere in the tree
    var referencesStatus = false;
    foreach (var expressionNode in root.Traverse())
    {
        if (expressionNode is Function function &&
            (String.Equals(function.Name, PipelineTemplateConstants.Always, StringComparison.OrdinalIgnoreCase) ||
             String.Equals(function.Name, PipelineTemplateConstants.Cancelled, StringComparison.OrdinalIgnoreCase) ||
             String.Equals(function.Name, PipelineTemplateConstants.Failure, StringComparison.OrdinalIgnoreCase) ||
             String.Equals(function.Name, PipelineTemplateConstants.Success, StringComparison.OrdinalIgnoreCase)))
        {
            referencesStatus = true;
            break;
        }
    }

    // Without an explicit status check, implicitly require success
    return referencesStatus ? expression : $"{PipelineTemplateConstants.Success}() && ({expression})";
}
// Named values permitted in a job-level "if" expression
private static readonly INamedValueInfo[] s_jobIfNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
};
// Named values permitted in a step-level "if" expression in the root workflow file
private static readonly INamedValueInfo[] s_stepNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Strategy),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Matrix),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Steps),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Job),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Runner),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Env),
};
// Named values permitted in a step-level "if" expression inside a steps template;
// additionally allows "inputs", which only exists within a template scope
private static readonly INamedValueInfo[] s_stepInTemplateNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Strategy),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Matrix),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Steps),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Inputs),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Job),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Runner),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Env),
};
// Status functions recognized when parsing step-level "if" expressions
// (no-op implementations — used for validation only, not evaluation)
private static readonly IFunctionInfo[] s_stepConditionFunctions = new IFunctionInfo[]
{
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Always, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Cancelled, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Failure, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Success, 0, 0),
};
}
}

View File

@@ -46,170 +46,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
// Upper bound (in bytes) for evaluated template results; defaults to 10 MB
public Int32 MaxResultSize { get; set; } = 10 * 1024 * 1024; // 10 mb
/// <summary>
/// Evaluates the strategy template token (matrix / fail-fast / max-parallel) and
/// expands it into concrete job configurations. When the token is absent, or
/// evaluation yields no configurations, a single default configuration is returned.
/// </summary>
/// <param name="token">The strategy template token; may be null or a null token.</param>
/// <param name="contextData">Named context values available to template expressions.</param>
/// <param name="jobFactoryDisplayName">Display name used when building the default configuration.</param>
public StrategyResult EvaluateStrategy(
TemplateToken token,
DictionaryContextData contextData,
String jobFactoryDisplayName)
{
var result = new StrategyResult();
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData);
try
{
// Expand template expressions, then convert the literal token into a StrategyResult
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.Strategy, token, 0, null, omitHeader: true);
context.Errors.Check();
result = PipelineTemplateConverter.ConvertToStrategy(context, token, jobFactoryDisplayName);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
// Validation failures are already recorded in context.Errors; capture anything else
context.Errors.Add(ex);
}
context.Errors.Check();
}
// Fall back to a single default configuration: no matrix, one job
if (result.Configurations.Count == 0)
{
var configuration = new StrategyConfiguration
{
Name = PipelineConstants.DefaultJobName,
DisplayName = new JobDisplayNameBuilder(jobFactoryDisplayName).Build(),
};
configuration.ContextData.Add(PipelineTemplateConstants.Matrix, null);
configuration.ContextData.Add(
PipelineTemplateConstants.Strategy,
new DictionaryContextData
{
{
"fail-fast",
new BooleanContextData(result.FailFast)
},
{
"job-index",
new NumberContextData(0)
},
{
"job-total",
new NumberContextData(1)
},
{
"max-parallel",
new NumberContextData(1)
}
});
result.Configurations.Add(configuration);
}
return result;
}
/// <summary>
/// Evaluates the job display-name token, falling back to the supplied default
/// when the token is absent or evaluation produces an empty string.
/// </summary>
public String EvaluateJobDisplayName(
    TemplateToken token,
    DictionaryContextData contextData,
    String defaultDisplayName)
{
    // A missing or null token means there is nothing to evaluate
    if (token == null || token.Type == TokenType.Null)
    {
        return defaultDisplayName;
    }

    String displayName = null;
    var context = CreateContext(contextData);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StringStrategyContext, token, 0, null, omitHeader: true);
        context.Errors.Check();
        displayName = PipelineTemplateConverter.ConvertToJobDisplayName(context, evaluated);
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        // Validation failures are already recorded; capture anything else
        context.Errors.Add(ex);
    }
    context.Errors.Check();

    return String.IsNullOrEmpty(displayName) ? defaultDisplayName : displayName;
}
/// <summary>
/// Evaluates the "runs-on" token into a job target. Throws when no target
/// can be produced (including when the token is absent).
/// </summary>
public PhaseTarget EvaluateJobTarget(
    TemplateToken token,
    DictionaryContextData contextData)
{
    PhaseTarget target = null;
    if (token != null && token.Type != TokenType.Null)
    {
        var context = CreateContext(contextData);
        try
        {
            var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.RunsOn, token, 0, null, omitHeader: true);
            context.Errors.Check();
            target = PipelineTemplateConverter.ConvertToJobTarget(context, evaluated);
        }
        catch (Exception ex) when (!(ex is TemplateValidationException))
        {
            // Validation failures are already recorded; capture anything else
            context.Errors.Add(ex);
        }
        context.Errors.Check();
    }

    if (target == null)
    {
        throw new InvalidOperationException("Job target cannot be null");
    }

    return target;
}
/// <summary>
/// Evaluates the job timeout token (minutes), falling back to the default
/// timeout when the token is absent or produces no value.
/// </summary>
public Int32 EvaluateJobTimeout(
    TemplateToken token,
    DictionaryContextData contextData)
{
    // A missing or null token means use the default
    if (token == null || token.Type == TokenType.Null)
    {
        return PipelineConstants.DefaultJobTimeoutInMinutes;
    }

    Int32? timeout = null;
    var context = CreateContext(contextData);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.NumberStrategyContext, token, 0, null, omitHeader: true);
        context.Errors.Check();
        timeout = PipelineTemplateConverter.ConvertToJobTimeout(context, evaluated);
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        // Validation failures are already recorded; capture anything else
        context.Errors.Add(ex);
    }
    context.Errors.Check();

    return timeout ?? PipelineConstants.DefaultJobTimeoutInMinutes;
}
/// <summary>
/// Evaluates the job cancel-timeout token (minutes), falling back to the
/// default cancel timeout when the token is absent or produces no value.
/// </summary>
public Int32 EvaluateJobCancelTimeout(
    TemplateToken token,
    DictionaryContextData contextData)
{
    // A missing or null token means use the default
    if (token == null || token.Type == TokenType.Null)
    {
        return PipelineConstants.DefaultJobCancelTimeoutInMinutes;
    }

    Int32? cancelTimeout = null;
    var context = CreateContext(contextData);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.NumberStrategyContext, token, 0, null, omitHeader: true);
        context.Errors.Check();
        cancelTimeout = PipelineTemplateConverter.ConvertToJobCancelTimeout(context, evaluated);
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        // Validation failures are already recorded; capture anything else
        context.Errors.Add(ex);
    }
    context.Errors.Check();

    return cancelTimeout ?? PipelineConstants.DefaultJobCancelTimeoutInMinutes;
}
public DictionaryContextData EvaluateStepScopeInputs(
TemplateToken token,
DictionaryContextData contextData)

View File

@@ -1,239 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.IO;
using System.Threading;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.ObjectTemplating.Schema;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
using GitHub.DistributedTask.ObjectTemplating;
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class PipelineTemplateParser
{
    static PipelineTemplateParser()
    {
        // The schema is immutable and shared by every parser instance
        var schemaFactory = new PipelineTemplateSchemaFactory();
        s_schema = schemaFactory.CreateSchema();
    }

    public PipelineTemplateParser(
        ITraceWriter trace,
        ParseOptions options)
    {
        m_trace = trace ?? throw new ArgumentNullException(nameof(trace));
        // Copy the options so later caller mutations cannot affect this parser
        m_parseOptions = new ParseOptions(options ?? throw new ArgumentNullException(nameof(options)));
    }

    /// <summary>
    /// Loads the YAML pipeline template
    /// </summary>
    /// <exception cref="FileNotFoundException">Thrown when the entry YAML file does not exist</exception>
    public PipelineTemplate LoadPipeline(
        IFileProvider fileProvider,
        RepositoryResource self,
        String path,
        CancellationToken cancellationToken)
    {
        fileProvider = fileProvider ?? throw new ArgumentNullException(nameof(fileProvider));
        self = self ?? throw new ArgumentNullException(nameof(self));
        var parseResult = LoadPipelineInternal(fileProvider, path, cancellationToken);
        return PipelineTemplateConverter.ConvertToPipeline(parseResult.Context, self, parseResult.Value);
    }

    /// <summary>
    /// Loads the entry YAML file and resolves any steps-template references.
    /// Failures are accumulated on the returned result's context errors rather
    /// than thrown from this method.
    /// </summary>
    internal ParseResult LoadPipelineInternal(
        IFileProvider fileProvider,
        String path,
        CancellationToken cancellationToken)
    {
        // Setup the context
        var templateLoader = new YamlTemplateLoader(new ParseOptions(m_parseOptions), fileProvider);
        var context = new TemplateContext
        {
            CancellationToken = cancellationToken,
            Errors = new TemplateValidationErrors(m_parseOptions.MaxErrors, m_parseOptions.MaxErrorMessageLength),
            Memory = new TemplateMemory(
                maxDepth: m_parseOptions.MaxDepth,
                maxEvents: m_parseOptions.MaxParseEvents,
                maxBytes: m_parseOptions.MaxResultSize),
            Schema = s_schema,
            TraceWriter = m_trace,
        };

        // Load the entry file
        var token = default(TemplateToken);
        try
        {
            token = templateLoader.LoadFile(context, null, null, path, PipelineTemplateConstants.WorkflowRoot);
        }
        catch (Exception ex)
        {
            context.Errors.Add(ex);
        }

        var result = new ParseResult
        {
            Context = context,
            Value = token,
        };

        if (token != null && context.Errors.Count == 0)
        {
            var templateReferenceCount = ResolveWorkflowTemplateReferences(context, templateLoader, token);

            // Trace the fully resolved document when any templates were inlined
            if (templateReferenceCount > 0 && context.Errors.Count == 0)
            {
                context.TraceWriter.Info(String.Empty);
                context.TraceWriter.Info("# ");
                context.TraceWriter.Info("# Template resolution complete. Final runtime YAML document:");
                context.TraceWriter.Info("# ");
                context.TraceWriter.Info("{0}", result.ToYaml());
            }
        }

        return result;
    }

    // Walks the workflow root and resolves template references under the jobs.
    // Returns the number of template references resolved.
    private Int32 ResolveWorkflowTemplateReferences(
        TemplateContext context,
        YamlTemplateLoader templateLoader,
        TemplateToken token)
    {
        var resolvedCount = 0;
        var workflow = token.AssertMapping("workflow");
        foreach (var workflowProperty in workflow)
        {
            var workflowPropertyName = workflowProperty.Key.AssertString("workflow property");
            switch (workflowPropertyName.Value)
            {
                // Both properties carry a jobs mapping and are handled identically
                // (the original duplicated the same body in two separate cases)
                case PipelineTemplateConstants.Jobs:
                case PipelineTemplateConstants.Workflow:
                    resolvedCount += ResolveJobsTemplateReferences(context, templateLoader, workflowProperty.Value);
                    break;
            }
        }

        return resolvedCount;
    }

    // Resolves template references within each job's steps and records the
    // resulting scopes on the job under the "scopes" property.
    private Int32 ResolveJobsTemplateReferences(
        TemplateContext context,
        YamlTemplateLoader templateLoader,
        TemplateToken token)
    {
        var resolvedCount = 0;
        var jobs = token.AssertMapping("jobs");
        foreach (var jobsProperty in jobs)
        {
            var job = jobsProperty.Value.AssertMapping("jobs property value");
            var scopes = new SequenceToken(null, null, null);
            foreach (var jobProperty in job)
            {
                var jobPropertyName = jobProperty.Key.AssertString("job property name");
                switch (jobPropertyName.Value)
                {
                    case PipelineTemplateConstants.Steps:
                        resolvedCount += ResolveStepsTemplateReferences(context, templateLoader, jobProperty.Value, scopes);
                        break;
                }
            }

            if (scopes.Count > 0)
            {
                var scopesPropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Scopes);
                job.Add(scopesPropertyName, scopes);
                context.Memory.AddBytes(scopesPropertyName);
                context.Memory.AddBytes(scopes); // Do not traverse, nested objects already accounted for
            }
        }

        return resolvedCount;
    }

    // Replaces each template-reference step with the referenced template's steps,
    // in place, and appends the scope created for each resolved reference.
    // Stops early as soon as any error is recorded.
    private Int32 ResolveStepsTemplateReferences(
        TemplateContext context,
        YamlTemplateLoader templateLoader,
        TemplateToken token,
        SequenceToken scopes)
    {
        var resolvedCount = 0;
        var steps = token.AssertSequence("steps");
        var stepIndex = 0;
        while (stepIndex < steps.Count && context.Errors.Count == 0)
        {
            var step = steps[stepIndex].AssertMapping("step");
            if (!TemplateReference.TryCreate(step, out var reference))
            {
                stepIndex++;
                continue;
            }

            resolvedCount++;
            var template = templateLoader.LoadFile(
                context,
                reference.TemplatePath.FileId,
                reference.TemplateScope,
                reference.TemplatePath.Value,
                PipelineTemplateConstants.StepsTemplateRoot);
            if (context.Errors.Count != 0)
            {
                break;
            }

            var scope = reference.CreateScope(context, template);
            if (context.Errors.Count != 0)
            {
                break;
            }

            // Remove the template reference and memory overhead
            steps.RemoveAt(stepIndex);
            context.Memory.SubtractBytes(step, true); // Traverse

            // Remove the template memory overhead
            context.Memory.SubtractBytes(template, true); // Traverse

            var templateSteps = GetSteps(template);
            if (templateSteps?.Count > 0)
            {
                // Add the steps from the template
                steps.InsertRange(stepIndex, templateSteps);
                context.Memory.AddBytes(templateSteps, true); // Traverse
                context.Memory.SubtractBytes(templateSteps, false);

                // Add the scope
                scopes.Add(scope);
                context.Memory.AddBytes(scope, true); // Traverse
            }
        }

        return resolvedCount;
    }

    // Returns the template's "steps" sequence, or null when not present
    private SequenceToken GetSteps(TemplateToken template)
    {
        var mapping = template.AssertMapping("steps template");
        foreach (var property in mapping)
        {
            var propertyName = property.Key.AssertString("steps template property name");
            switch (propertyName.Value)
            {
                case PipelineTemplateConstants.Steps:
                    return property.Value.AssertSequence("steps template steps property value");
            }
        }

        return null;
    }

    // Built once in the static constructor and never reassigned
    private static readonly TemplateSchema s_schema;
    private readonly ParseOptions m_parseOptions;
    private readonly ITraceWriter m_trace;
}
}

View File

@@ -1,121 +0,0 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Text;
using GitHub.DistributedTask.Pipelines.Validation;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Builds legal, unique reference names from arbitrary segments. Characters
/// outside [A-Za-z0-9_-] are replaced with '_', segments are joined with '_',
/// and generated names are deduplicated with numeric suffixes.
/// </summary>
internal sealed class ReferenceNameBuilder
{
    /// <summary>
    /// Appends one segment to the name under construction. When this is the first
    /// segment, a leading digit or hyphen is prefixed with '_' so the name starts
    /// with a legal character; otherwise a '_' separator is appended first.
    /// </summary>
    internal void AppendSegment(String value)
    {
        if (String.IsNullOrEmpty(value))
        {
            return;
        }

        if (m_name.Length == 0)
        {
            var first = value[0];
            if ((first >= 'a' && first <= 'z') ||
                (first >= 'A' && first <= 'Z') ||
                first == '_')
            {
                // Legal first char
            }
            else if ((first >= '0' && first <= '9') || first == '-')
            {
                // Illegal first char, but legal char.
                // Prepend "_".
                m_name.Append("_");
            }
            else
            {
                // Illegal char; the loop below replaces it with '_',
                // which is a legal first char
            }
        }
        else
        {
            // Separator
            m_name.Append(c_separator);
        }

        foreach (var c in value)
        {
            if ((c >= 'a' && c <= 'z') ||
                (c >= 'A' && c <= 'Z') ||
                (c >= '0' && c <= '9') ||
                c == '_' ||
                c == '-')
            {
                // Legal
                m_name.Append(c);
            }
            else
            {
                // Illegal
                m_name.Append("_");
            }
        }
    }

    /// <summary>
    /// Produces a unique name from the accumulated segments ("job" when no segments
    /// were appended), truncating and appending a numeric suffix as needed to stay
    /// unique and within the max node-name length. Resets the builder on success.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown after 999 collisions.</exception>
    internal String Build()
    {
        var original = m_name.Length > 0 ? m_name.ToString() : "job";
        var attempt = 1;
        var suffix = default(String);
        while (true)
        {
            if (attempt == 1)
            {
                suffix = String.Empty;
            }
            else if (attempt < 1000)
            {
                suffix = String.Format(CultureInfo.InvariantCulture, "_{0}", attempt);
            }
            else
            {
                throw new InvalidOperationException("Unable to create a unique name");
            }

            var candidate = original.Substring(0, Math.Min(original.Length, PipelineConstants.MaxNodeNameLength - suffix.Length)) + suffix;
            if (m_distinctNames.Add(candidate))
            {
                m_name.Clear();
                return candidate;
            }

            attempt++;
        }
    }

    /// <summary>
    /// Registers a caller-authored name. Returns false with an error message when
    /// the name is malformed, too long, or already used within the same scope.
    /// </summary>
    internal Boolean TryAddKnownName(
        String value,
        out String error)
    {
        // BUGFIX: the original condition was
        //   !NameValidation.IsValid(...) && value.Length < MaxNodeNameLength
        // which only rejected *short* invalid names — invalid long names and
        // over-long valid names slipped through, contradicting the error message
        // below. Reject when the name is invalid OR not shorter than the max.
        if (!NameValidation.IsValid(value, allowHyphens: true) || value.Length >= PipelineConstants.MaxNodeNameLength)
        {
            error = $"The identifier '{value}' is invalid. IDs may only contain alphanumeric characters, '_', and '-'. IDs must start with a letter or '_' and must be less than {PipelineConstants.MaxNodeNameLength} characters.";
            return false;
        }
        else if (!m_distinctNames.Add(value))
        {
            error = $"The identifier '{value}' may not be used more than once within the same scope.";
            return false;
        }
        else
        {
            error = null;
            return true;
        }
    }

    private const String c_separator = "_";
    private readonly HashSet<String> m_distinctNames = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
    private readonly StringBuilder m_name = new StringBuilder();
}
}

View File

@@ -1,37 +0,0 @@
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Extension helpers that map a TaskResult onto the status string used by
/// pipeline expression contexts (success / failure / cancelled / skipped).
/// </summary>
public static class TaskResultExtensions
{
    /// <summary>
    /// Maps the result to its context-data status string. Any unrecognized
    /// result maps to null.
    /// </summary>
    public static PipelineContextData ToContextData(this TaskResult result)
    {
        if (result == TaskResult.Succeeded || result == TaskResult.SucceededWithIssues)
        {
            return new StringContextData(PipelineTemplateConstants.Success);
        }

        if (result == TaskResult.Failed || result == TaskResult.Abandoned)
        {
            return new StringContextData(PipelineTemplateConstants.Failure);
        }

        if (result == TaskResult.Canceled)
        {
            return new StringContextData(PipelineTemplateConstants.Cancelled);
        }

        if (result == TaskResult.Skipped)
        {
            return new StringContextData(PipelineTemplateConstants.Skipped);
        }

        return null;
    }

    /// <summary>
    /// Nullable overload; a missing result maps to null context data.
    /// </summary>
    public static PipelineContextData ToContextData(this TaskResult? result)
    {
        return result == null ? null : result.Value.ToContextData();
    }
}
}

View File

@@ -1,197 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.IO;
using System.Threading;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.ObjectTemplating.Schema;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
using GitHub.DistributedTask.ObjectTemplating;
/// <summary>
/// Represents a steps-template reference within a job, plus the logic for
/// merging the reference's input overrides with the template's declared
/// inputs/outputs into a "scope" mapping.
/// </summary>
internal sealed class TemplateReference
{
    private TemplateReference(
        String scope,
        String id,
        String generatedId,
        StringToken templatePath,
        MappingToken inputs)
    {
        Scope = scope;
        TemplatePath = templatePath;
        Inputs = inputs;

        // A generated id (assigned by the system) takes precedence over an authored id
        if (!String.IsNullOrEmpty(generatedId))
        {
            Id = generatedId;
            m_isGeneratedId = true;
        }
        else
        {
            Id = id;
        }
    }

    // The reference's identifier (authored or generated)
    internal String Id { get; }

    // Input overrides supplied on the reference, if any
    internal MappingToken Inputs { get; }

    // The scope the reference appears within, if any
    internal String Scope { get; }

    // Path of the referenced template file
    internal StringToken TemplatePath { get; }

    // Fully qualified scope name for the referenced template
    internal String TemplateScope
    {
        get
        {
            return !String.IsNullOrEmpty(Scope) ? $"{Scope}.{Id}" : Id;
        }
    }

    /// <summary>
    /// Builds the scope mapping for the referenced template: the scope name,
    /// the merged inputs (reference overrides win over template defaults) and,
    /// for authored ids only, the template's outputs. Unknown override input
    /// names are recorded as errors on the context and skipped.
    /// </summary>
    internal MappingToken CreateScope(
        TemplateContext context,
        TemplateToken template)
    {
        var mapping = template.AssertMapping("template file");

        // Get the inputs and outputs from the template
        var inputs = default(MappingToken);
        var outputs = default(MappingToken);
        foreach (var pair in mapping)
        {
            var propertyName = pair.Key.AssertString("template file property name");
            switch (propertyName.Value)
            {
                case PipelineTemplateConstants.Inputs:
                    inputs = pair.Value.AssertMapping("template file inputs");
                    break;

                case PipelineTemplateConstants.Outputs:
                    // Outputs are only surfaced when the reference has an authored id
                    if (!m_isGeneratedId)
                    {
                        outputs = pair.Value.AssertMapping("template file outputs");
                    }
                    break;
            }
        }

        // Determine allowed input names
        var allowedInputNames = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
        if (inputs?.Count > 0)
        {
            foreach (var pair in inputs)
            {
                var inputPropertyName = pair.Key.AssertString("template file inputs property");
                allowedInputNames.Add(inputPropertyName.Value);
            }
        }

        // Validate override input names
        var overrideInputs = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
        var mergedInputs = new MappingToken(null, null, null);
        if (Inputs?.Count > 0)
        {
            foreach (var pair in Inputs)
            {
                var inputPropertyName = pair.Key.AssertString("template reference inputs property");
                if (!allowedInputNames.Contains(inputPropertyName.Value))
                {
                    context.Error(inputPropertyName, $"Input '{inputPropertyName.Value}' is not allowed");
                    continue;
                }

                overrideInputs.Add(inputPropertyName.Value);
                mergedInputs.Add(pair.Key, pair.Value);
            }
        }

        // Merge defaults
        if (inputs?.Count > 0)
        {
            foreach (var pair in inputs)
            {
                var inputPropertyName = pair.Key.AssertString("template file inputs property");
                if (!overrideInputs.Contains(inputPropertyName.Value))
                {
                    mergedInputs.Add(pair.Key, pair.Value);
                }
            }
        }

        // Build the scope object
        var result = new MappingToken(null, null, null);
        var namePropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Name);
        var namePropertyValue = new StringToken(null, null, null, TemplateScope);
        result.Add(namePropertyName, namePropertyValue);
        if (mergedInputs.Count > 0)
        {
            var inputsPropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Inputs);
            result.Add(inputsPropertyName, mergedInputs);
        }

        if (outputs?.Count > 0)
        {
            var outputsPropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Outputs);
            result.Add(outputsPropertyName, outputs);
        }

        return result;
    }

    /// <summary>
    /// Attempts to interpret the mapping as a template reference. Returns true
    /// with an instance when a 'template' property is present; otherwise false.
    /// </summary>
    internal static Boolean TryCreate(
        MappingToken mapping,
        out TemplateReference reference)
    {
        var scope = default(String);
        var id = default(String);
        var generatedId = default(String);
        var templatePath = default(StringToken);
        var inputs = default(MappingToken);
        foreach (var property in mapping)
        {
            var propertyName = property.Key.AssertString("candidate template reference property name");
            switch (propertyName.Value)
            {
                case PipelineTemplateConstants.Scope:
                    var scopeStringToken = property.Value.AssertString("step scope");
                    scope = scopeStringToken.Value;
                    break;

                case PipelineTemplateConstants.Id:
                    var idStringToken = property.Value.AssertString("step id");
                    id = idStringToken.Value;
                    break;

                case PipelineTemplateConstants.GeneratedId:
                    var generatedIdStringToken = property.Value.AssertString("step generated id");
                    generatedId = generatedIdStringToken.Value;
                    break;

                case PipelineTemplateConstants.Template:
                    templatePath = property.Value.AssertString("step template reference");
                    break;

                case PipelineTemplateConstants.Inputs:
                    inputs = property.Value.AssertMapping("step template reference inputs");
                    break;
            }
        }

        if (templatePath != null)
        {
            reference = new TemplateReference(scope, id, generatedId, templatePath, inputs);
            return true;
        }
        else
        {
            reference = null;
            return false;
        }
    }

    // Set only in the constructor (now readonly): true when Id came from the
    // generated-id property rather than an authored id
    private readonly Boolean m_isGeneratedId;
}
}

View File

@@ -1,572 +0,0 @@
using System;
using System.Globalization;
using System.IO;
using System.Linq;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using YamlDotNet.Core;
using YamlDotNet.Core.Events;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Converts a YAML file into a TemplateToken
/// </summary>
internal sealed class YamlObjectReader : IObjectReader
{
/// <summary>
/// Creates a reader over the given YAML text.
/// </summary>
/// <param name="fileId">Identifier of the file being parsed; stamped onto every produced token for error reporting. May be null.</param>
/// <param name="input">The YAML text to read.</param>
internal YamlObjectReader(
Int32? fileId,
TextReader input)
{
m_fileId = fileId;
m_parser = new Parser(input);
}
/// <summary>
/// Attempts to consume the current parse event as a scalar literal. Returns
/// false (without advancing the parser) when the current event is not a scalar.
/// Explicit tags take precedence; otherwise plain-style scalars are resolved
/// per the YAML 1.2 "core" schema, and any other style is treated as a string.
/// </summary>
public Boolean AllowLiteral(out LiteralToken value)
{
if (EvaluateCurrent() is Scalar scalar)
{
// Tag specified
if (!String.IsNullOrEmpty(scalar.Tag))
{
// String tag
if (String.Equals(scalar.Tag, c_stringTag, StringComparison.Ordinal))
{
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
MoveNext();
return true;
}
// Not plain style
if (scalar.Style != ScalarStyle.Plain)
{
throw new NotSupportedException($"The scalar style '{scalar.Style}' on line {scalar.Start.Line} and column {scalar.Start.Column} is not valid with the tag '{scalar.Tag}'");
}
// Boolean, Float, Integer, or Null
switch (scalar.Tag)
{
case c_booleanTag:
value = ParseBoolean(scalar);
break;
case c_floatTag:
value = ParseFloat(scalar);
break;
case c_integerTag:
value = ParseInteger(scalar);
break;
case c_nullTag:
value = ParseNull(scalar);
break;
default:
throw new NotSupportedException($"Unexpected tag '{scalar.Tag}'");
}
MoveNext();
return true;
}
// Plain style, determine type using YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
// Match order: null, boolean, integer, float; anything else becomes a string
if (scalar.Style == ScalarStyle.Plain)
{
if (MatchNull(scalar, out var nullToken))
{
value = nullToken;
}
else if (MatchBoolean(scalar, out var booleanToken))
{
value = booleanToken;
}
else if (MatchInteger(scalar, out var numberToken) ||
MatchFloat(scalar, out numberToken))
{
value = numberToken;
}
else
{
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
}
MoveNext();
return true;
}
// Otherwise assume string
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
MoveNext();
return true;
}
value = default;
return false;
}
/// <summary>
/// Attempts to consume the current parse event as a sequence start.
/// Returns false (without advancing) when it is not one.
/// </summary>
public Boolean AllowSequenceStart(out SequenceToken value)
{
    var sequenceStart = EvaluateCurrent() as SequenceStart;
    if (sequenceStart == null)
    {
        value = default;
        return false;
    }

    value = new SequenceToken(m_fileId, sequenceStart.Start.Line, sequenceStart.Start.Column);
    MoveNext();
    return true;
}
/// <summary>
/// Consumes the current parse event when it is a sequence end;
/// otherwise leaves the parser position unchanged.
/// </summary>
public Boolean AllowSequenceEnd()
{
    var isSequenceEnd = EvaluateCurrent() is SequenceEnd;
    if (isSequenceEnd)
    {
        MoveNext();
    }

    return isSequenceEnd;
}
/// <summary>
/// Attempts to consume the current parse event as a mapping start.
/// Returns false (without advancing) when it is not one.
/// </summary>
public Boolean AllowMappingStart(out MappingToken value)
{
    var mappingStart = EvaluateCurrent() as MappingStart;
    if (mappingStart == null)
    {
        value = default;
        return false;
    }

    value = new MappingToken(m_fileId, mappingStart.Start.Line, mappingStart.Start.Column);
    MoveNext();
    return true;
}
/// <summary>
/// Consumes the current parse event when it is a mapping end;
/// otherwise leaves the parser position unchanged.
/// </summary>
public Boolean AllowMappingEnd()
{
    var isMappingEnd = EvaluateCurrent() is MappingEnd;
    if (isMappingEnd)
    {
        MoveNext();
    }

    return isMappingEnd;
}
/// <summary>
/// Consumes the last parsing events, which are expected to be DocumentEnd and StreamEnd.
/// Throws when the stream does not end exactly as expected.
/// </summary>
public void ValidateEnd()
{
    if (!(EvaluateCurrent() is DocumentEnd))
    {
        throw new InvalidOperationException("Expected document end parse event");
    }
    MoveNext();

    if (!(EvaluateCurrent() is StreamEnd))
    {
        throw new InvalidOperationException("Expected stream end parse event");
    }
    MoveNext();

    // There must be nothing after the stream end
    if (MoveNext())
    {
        throw new InvalidOperationException("Expected end of parse events");
    }
}
/// <summary>
/// Consumes the first parsing events, which are expected to be StreamStart and DocumentStart.
/// Throws when the stream does not begin exactly as expected.
/// </summary>
public void ValidateStart()
{
    // Nothing may have been read yet
    if (EvaluateCurrent() != null)
    {
        throw new InvalidOperationException("Unexpected parser state");
    }

    if (!MoveNext())
    {
        throw new InvalidOperationException("Expected a parse event");
    }

    if (!(EvaluateCurrent() is StreamStart))
    {
        throw new InvalidOperationException("Expected stream start parse event");
    }
    MoveNext();

    if (!(EvaluateCurrent() is DocumentStart))
    {
        throw new InvalidOperationException("Expected document start parse event");
    }
    MoveNext();
}
// Returns the current parse event, caching it until MoveNext() is called.
// On first evaluation of an event, rejects anchors and any event type this
// reader does not understand.
private ParsingEvent EvaluateCurrent()
{
    if (m_current != null)
    {
        return m_current;
    }

    m_current = m_parser.Current;
    if (m_current == null)
    {
        return null;
    }

    if (m_current is Scalar scalar)
    {
        // Verify not using anchors
        if (scalar.Anchor != null)
        {
            throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{scalar.Anchor}'");
        }
    }
    else if (m_current is MappingStart mappingStart)
    {
        // Verify not using anchors
        if (mappingStart.Anchor != null)
        {
            throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{mappingStart.Anchor}'");
        }
    }
    else if (m_current is SequenceStart sequenceStart)
    {
        // Verify not using anchors
        if (sequenceStart.Anchor != null)
        {
            throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{sequenceStart.Anchor}'");
        }
    }
    else if (m_current is MappingEnd ||
             m_current is SequenceEnd ||
             m_current is DocumentStart ||
             m_current is DocumentEnd ||
             m_current is StreamStart ||
             m_current is StreamEnd)
    {
        // Structural events carry no anchors; nothing to validate
    }
    else
    {
        throw new InvalidOperationException($"Unexpected parsing event type: {m_current.GetType().Name}");
    }

    return m_current;
}
// Clears the one-event lookahead cache and advances the underlying parser.
// Returns false when the event stream is exhausted.
private Boolean MoveNext()
{
m_current = null;
return m_parser.MoveNext();
}
// Converts a scalar explicitly tagged as boolean; throws via
// ThrowInvalidValue when the text is not a core-schema boolean.
private BooleanToken ParseBoolean(Scalar scalar)
{
    if (!MatchBoolean(scalar, out var result))
    {
        ThrowInvalidValue(scalar, c_booleanTag); // throws
    }

    return result;
}
// Converts a scalar explicitly tagged as float; throws via
// ThrowInvalidValue when the text is not a core-schema float.
private NumberToken ParseFloat(Scalar scalar)
{
    if (!MatchFloat(scalar, out var result))
    {
        ThrowInvalidValue(scalar, c_floatTag); // throws
    }

    return result;
}
// Converts a scalar explicitly tagged as integer; throws via
// ThrowInvalidValue when the text is not a core-schema integer.
private NumberToken ParseInteger(Scalar scalar)
{
    if (!MatchInteger(scalar, out var result))
    {
        ThrowInvalidValue(scalar, c_integerTag); // throws
    }

    return result;
}
// Converts a scalar explicitly tagged as null; throws via
// ThrowInvalidValue when the text is not a core-schema null.
private NullToken ParseNull(Scalar scalar)
{
    if (!MatchNull(scalar, out var result))
    {
        ThrowInvalidValue(scalar, c_nullTag); // throws
    }

    return result;
}
// Attempts to resolve the scalar text as a boolean using the
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
private Boolean MatchBoolean(
    Scalar scalar,
    out BooleanToken value)
{
    var text = scalar.Value ?? String.Empty;

    if (text == "true" || text == "True" || text == "TRUE")
    {
        value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, true);
        return true;
    }

    if (text == "false" || text == "False" || text == "FALSE")
    {
        value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, false);
        return true;
    }

    value = default;
    return false;
}
/// <summary>
/// Attempts to resolve the scalar text as a float per the YAML 1.2 "core"
/// schema. Handles the special infinity/NaN spellings, then hand-scans the
/// text against the core-schema float pattern before delegating the actual
/// conversion to Double.TryParse. Returns false when the text does not match
/// the pattern; throws (via ThrowInvalidValue) when it matches the pattern
/// but exceeds the representable range.
/// </summary>
private Boolean MatchFloat(
Scalar scalar,
out NumberToken value)
{
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
var str = scalar.Value;
if (!String.IsNullOrEmpty(str))
{
// Check for [-+]?(\.inf|\.Inf|\.INF)|\.nan|\.NaN|\.NAN
switch (str)
{
case ".inf":
case ".Inf":
case ".INF":
case "+.inf":
case "+.Inf":
case "+.INF":
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.PositiveInfinity);
return true;
case "-.inf":
case "-.Inf":
case "-.INF":
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NegativeInfinity);
return true;
case ".nan":
case ".NaN":
case ".NAN":
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NaN);
return true;
}
// Otherwise check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?
// Skip leading sign
var index = str[0] == '-' || str[0] == '+' ? 1 : 0;
// Check for integer portion
var length = str.Length;
var hasInteger = false;
while (index < length && str[index] >= '0' && str[index] <= '9')
{
hasInteger = true;
index++;
}
// Check for decimal point
var hasDot = false;
if (index < length && str[index] == '.')
{
hasDot = true;
index++;
}
// Check for decimal portion
var hasDecimal = false;
while (index < length && str[index] >= '0' && str[index] <= '9')
{
hasDecimal = true;
index++;
}
// Check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)
// i.e. digits are required on at least one side of the dot
if ((hasDot && hasDecimal) || hasInteger)
{
// Check for end
if (index == length)
{
// Try parse
if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var doubleValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
return true;
}
// Otherwise exceeds range
else
{
ThrowInvalidValue(scalar, c_floatTag); // throws
}
}
// Check [eE][-+]?[0-9]
else if (index < length && (str[index] == 'e' || str[index] == 'E'))
{
index++;
// Skip sign
if (index < length && (str[index] == '-' || str[index] == '+'))
{
index++;
}
// Check for exponent
var hasExponent = false;
while (index < length && str[index] >= '0' && str[index] <= '9')
{
hasExponent = true;
index++;
}
// Check for end
if (hasExponent && index == length)
{
// Try parse
if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out var doubleValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, (Double)doubleValue);
return true;
}
// Otherwise exceeds range
else
{
ThrowInvalidValue(scalar, c_floatTag); // throws
}
}
}
}
}
value = default;
return false;
}
/// <summary>
/// Attempts to interpret the scalar as an integer using the YAML 1.2 "core"
/// schema: unsigned decimal, signed decimal, hexadecimal (0x...), or octal
/// (0o...). Returns false when the text does not match any integer pattern;
/// throws (via ThrowInvalidValue) when the text matches but exceeds range.
/// NOTE(review): decimal integers are parsed as Double, so magnitudes beyond
/// 2^53 silently lose precision — confirm this is intended.
/// </summary>
private Boolean MatchInteger(
    Scalar scalar,
    out NumberToken value)
{
    // YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
    var str = scalar.Value;
    if (!String.IsNullOrEmpty(str))
    {
        // Check for [0-9]+
        var firstChar = str[0];
        if (firstChar >= '0' && firstChar <= '9' &&
            str.Skip(1).All(x => x >= '0' && x <= '9'))
        {
            // Try parse
            if (Double.TryParse(str, NumberStyles.None, CultureInfo.InvariantCulture, out var doubleValue))
            {
                value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
                return true;
            }

            // Otherwise exceeds range
            ThrowInvalidValue(scalar, c_integerTag); // throws
        }
        // Check for (-|+)[0-9]+
        else if ((firstChar == '-' || firstChar == '+') &&
            str.Length > 1 &&
            str.Skip(1).All(x => x >= '0' && x <= '9'))
        {
            // Try parse
            if (Double.TryParse(str, NumberStyles.AllowLeadingSign, CultureInfo.InvariantCulture, out var doubleValue))
            {
                value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
                return true;
            }

            // Otherwise exceeds range
            ThrowInvalidValue(scalar, c_integerTag); // throws
        }
        // Check for 0x[0-9a-fA-F]+
        else if (firstChar == '0' &&
            str.Length > 2 &&
            str[1] == 'x' &&
            str.Skip(2).All(x => (x >= '0' && x <= '9') || (x >= 'a' && x <= 'f') || (x >= 'A' && x <= 'F')))
        {
            // Try parse (hex values are limited to the Int32 range)
            if (Int32.TryParse(str.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out var integerValue))
            {
                value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
                return true;
            }

            // Otherwise exceeds range
            ThrowInvalidValue(scalar, c_integerTag); // throws
        }
        // Check for 0o[0-7]+
        else if (firstChar == '0' &&
            str.Length > 2 &&
            str[1] == 'o' &&
            str.Skip(2).All(x => x >= '0' && x <= '7'))
        {
            // Try parse (octal values are limited to the Int32 range)
            var integerValue = default(Int32);
            try
            {
                integerValue = Convert.ToInt32(str.Substring(2), 8);
            }
            // Otherwise exceeds range
            catch (Exception)
            {
                ThrowInvalidValue(scalar, c_integerTag); // throws
            }

            value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
            return true;
        }
    }

    // Not an integer
    value = default;
    return false;
}
/// <summary>
/// Attempts to interpret the scalar as a null using the YAML 1.2 "core" schema
/// (https://yaml.org/spec/1.2/spec.html#id2804923): the empty string, the
/// spellings null/Null/NULL, or the tilde shorthand.
/// </summary>
private Boolean MatchNull(
    Scalar scalar,
    out NullToken value)
{
    var literal = scalar.Value;

    if (String.IsNullOrEmpty(literal) ||
        literal == "null" ||
        literal == "Null" ||
        literal == "NULL" ||
        literal == "~")
    {
        value = new NullToken(m_fileId, scalar.Start.Line, scalar.Start.Column);
        return true;
    }

    value = default;
    return false;
}
/// <summary>
/// Throws to indicate the scalar cannot be converted to the expected type.
/// </summary>
/// <param name="scalar">The offending scalar</param>
/// <param name="tag">The YAML tag of the expected type (e.g. the bool/int/float/null core-schema tag)</param>
/// <exception cref="NotSupportedException">Always thrown</exception>
private void ThrowInvalidValue(
    Scalar scalar,
    String tag)
{
    // Fix: report the expected type tag supplied by the caller. Previously the
    // message interpolated scalar.Tag, which left the 'tag' parameter unused and
    // reported the scalar's own tag rather than the type that failed to parse.
    throw new NotSupportedException($"The value '{scalar.Value}' on line {scalar.Start.Line} and column {scalar.Start.Column} is invalid for the type '{tag}'");
}
// YAML 1.2 "core" schema tags (https://yaml.org/spec/1.2/spec.html#id2804923)
// used when reporting the expected type of a scalar that failed to parse.
private const String c_booleanTag = "tag:yaml.org,2002:bool";
private const String c_floatTag = "tag:yaml.org,2002:float";
private const String c_integerTag = "tag:yaml.org,2002:int";
private const String c_nullTag = "tag:yaml.org,2002:null";
private const String c_stringTag = "tag:yaml.org,2002:string";
// Optional file ID stamped onto every produced token; null when no file context exists.
private readonly Int32? m_fileId;
// Underlying YamlDotNet parser supplying the event stream.
private readonly Parser m_parser;
// NOTE(review): presumably the current look-ahead parsing event consumed by the
// reader methods above — confirm against the rest of the class.
private ParsingEvent m_current;
}
}

View File

@@ -1,73 +0,0 @@
using System;
using System.Globalization;
using System.IO;
using GitHub.DistributedTask.ObjectTemplating;
using YamlDotNet.Core.Events;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Writes a TemplateToken out as YAML by forwarding each write to a
/// YamlDotNet emitter over the supplied StringWriter.
/// </summary>
internal sealed class YamlObjectWriter : IObjectWriter
{
    internal YamlObjectWriter(StringWriter writer)
    {
        m_emitter = new YamlDotNet.Core.Emitter(writer);
    }

    /// <summary>Begins the YAML stream and document.</summary>
    public void WriteStart()
    {
        m_emitter.Emit(new StreamStart());
        m_emitter.Emit(new DocumentStart());
    }

    /// <summary>Ends the document and the YAML stream.</summary>
    public void WriteEnd()
    {
        m_emitter.Emit(new DocumentEnd(isImplicit: true));
        m_emitter.Emit(new StreamEnd());
    }

    public void WriteString(String value) => m_emitter.Emit(new Scalar(value ?? String.Empty));

    public void WriteBoolean(Boolean value) => m_emitter.Emit(new Scalar(value ? "true" : "false"));

    // 15 significant digits, invariant culture
    public void WriteNumber(Double value) => m_emitter.Emit(new Scalar(value.ToString("G15", CultureInfo.InvariantCulture)));

    public void WriteNull() => m_emitter.Emit(new Scalar("null"));

    public void WriteSequenceStart() => m_emitter.Emit(new SequenceStart(null, null, true, SequenceStyle.Block));

    public void WriteSequenceEnd() => m_emitter.Emit(new SequenceEnd());

    public void WriteMappingStart() => m_emitter.Emit(new MappingStart());

    public void WriteMappingEnd() => m_emitter.Emit(new MappingEnd());

    private readonly YamlDotNet.Core.IEmitter m_emitter;
}
}

View File

@@ -1,251 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Loads a YAML file, and returns the parsed TemplateToken. Enforces the
/// configured max-file-count and max-file-size limits, caches file content,
/// validates step/job reference names, and stamps template steps with scope
/// and generated IDs.
/// </summary>
internal sealed class YamlTemplateLoader
{
    public YamlTemplateLoader(
        ParseOptions parseOptions,
        IFileProvider fileProvider)
    {
        // Copy the options so later mutation by the caller does not affect this loader
        m_parseOptions = new ParseOptions(parseOptions);
        m_fileProvider = fileProvider ?? throw new ArgumentNullException(nameof(fileProvider));
    }

    /// <summary>
    /// Loads, parses, and validates the YAML file at <paramref name="path"/>.
    /// The first file loaded by this instance is treated as the entry file.
    /// </summary>
    /// <param name="context">Template context receiving errors, trace output, and file IDs</param>
    /// <param name="rootFileId">NOTE(review): not referenced within this method — confirm whether it is still needed</param>
    /// <param name="scope">Scope name stamped onto template steps; may be null/empty</param>
    /// <param name="path">Path of the file to load; resolved via the file provider</param>
    /// <param name="templateType">Expected root template type (workflow or steps template)</param>
    /// <exception cref="InvalidOperationException">When errors are pending, or a file limit is exceeded</exception>
    public TemplateToken LoadFile(
        TemplateContext context,
        Int32? rootFileId,
        String scope,
        String path,
        String templateType)
    {
        if (context.Errors.Count > 0)
        {
            throw new InvalidOperationException("Expected error count to be 0 when attempting to load a new file");
        }

        // Is entry file?
        var isEntryFile = m_referencedFiles.Count == 0;

        // Root the path
        path = m_fileProvider.ResolvePath(null, path);

        // Validate max files (m_referencedFiles is a set, so re-loading the same
        // path does not count against the limit)
        m_referencedFiles.Add(path);
        if (m_parseOptions.MaxFiles > 0 && m_referencedFiles.Count > m_parseOptions.MaxFiles)
        {
            throw new InvalidOperationException($"The maximum file count of {m_parseOptions.MaxFiles} has been exceeded");
        }

        // Get the file ID
        var fileId = context.GetFileId(path);

        // Check the cache
        if (!m_cache.TryGetValue(path, out String fileContent))
        {
            // Fetch the file
            context.CancellationToken.ThrowIfCancellationRequested();
            fileContent = m_fileProvider.GetFileContent(path);

            // Validate max file size
            if (fileContent.Length > m_parseOptions.MaxFileSize)
            {
                throw new InvalidOperationException($"The maximum file size of {m_parseOptions.MaxFileSize} characters has been exceeded");
            }

            // Cache
            m_cache[path] = fileContent;
        }

        // Deserialize
        var token = default(TemplateToken);
        using (var stringReader = new StringReader(fileContent))
        {
            var yamlObjectReader = new YamlObjectReader(fileId, stringReader);
            token = TemplateReader.Read(context, templateType, yamlObjectReader, fileId, out _);
        }

        // Trace
        if (!isEntryFile)
        {
            context.TraceWriter.Info(String.Empty);
        }
        context.TraceWriter.Info("# ");
        context.TraceWriter.Info("# {0}", path);
        context.TraceWriter.Info("# ");

        // Validate ref names (only when parsing produced no errors)
        if (context.Errors.Count == 0)
        {
            switch (templateType)
            {
                case PipelineTemplateConstants.WorkflowRoot:
                    ValidateWorkflow(context, scope, token);
                    break;

                case PipelineTemplateConstants.StepsTemplateRoot:
                    var stepsTemplate = token.AssertMapping("steps template");
                    foreach (var stepsTemplateProperty in stepsTemplate)
                    {
                        var stepsTemplatePropertyName = stepsTemplateProperty.Key.AssertString("steps template property name");
                        switch (stepsTemplatePropertyName.Value)
                        {
                            case PipelineTemplateConstants.Steps:
                                ValidateSteps(context, scope, stepsTemplateProperty.Value);
                                break;
                        }
                    }
                    break;

                default:
                    throw new NotImplementedException($"Unexpected template type '{templateType}' when loading yaml file");
            }
        }

        return token;
    }

    /// <summary>
    /// Walks a workflow mapping and validates the steps of every job.
    /// </summary>
    private void ValidateWorkflow(
        TemplateContext context,
        String scope,
        TemplateToken token)
    {
        var workflow = token.AssertMapping("workflow");
        foreach (var workflowProperty in workflow)
        {
            var workflowPropertyName = workflowProperty.Key.AssertString("workflow property name");
            switch (workflowPropertyName.Value)
            {
                case PipelineTemplateConstants.Jobs:
                case PipelineTemplateConstants.Workflow:
                    var jobs = workflowProperty.Value.AssertMapping("workflow property value");
                    foreach (var jobsProperty in jobs)
                    {
                        var job = jobsProperty.Value.AssertMapping("jobs property value");
                        foreach (var jobProperty in job)
                        {
                            var jobPropertyName = jobProperty.Key.AssertString("job property name");
                            switch (jobPropertyName.Value)
                            {
                                case PipelineTemplateConstants.Steps:
                                    ValidateSteps(context, scope, jobProperty.Value);
                                    break;
                            }
                        }
                    }
                    break;
            }
        }
    }

    /// <summary>
    /// Validates step reference names (IDs), stamps each step with the current
    /// scope, and generates reference names for template-reference steps that
    /// do not declare one.
    /// </summary>
    private void ValidateSteps(
        TemplateContext context,
        String scope,
        TemplateToken token)
    {
        var nameBuilder = new ReferenceNameBuilder();
        var steps = token.AssertSequence("steps");
        var needsReferenceName = new List<MappingToken>();
        foreach (var stepsItem in steps)
        {
            var step = stepsItem.AssertMapping("steps item");
            var isTemplateReference = false;
            var hasReferenceName = false;
            foreach (var stepProperty in step)
            {
                var stepPropertyKey = stepProperty.Key.AssertString("step property name");
                switch (stepPropertyKey.Value)
                {
                    // Validate reference names
                    case PipelineTemplateConstants.Id:
                        var referenceNameLiteral = stepProperty.Value.AssertString("step ID");
                        var referenceName = referenceNameLiteral.Value;
                        if (String.IsNullOrEmpty(referenceName))
                        {
                            continue;
                        }

                        if (!nameBuilder.TryAddKnownName(referenceName, out var error))
                        {
                            context.Error(referenceNameLiteral, error);
                        }

                        hasReferenceName = true;
                        break;

                    case PipelineTemplateConstants.Template:
                        isTemplateReference = true;
                        break;
                }
            }

            // No reference name
            if (isTemplateReference && !hasReferenceName)
            {
                needsReferenceName.Add(step);
            }

            // Stamp the scope (tracked against the context's memory budget)
            if (!String.IsNullOrEmpty(scope))
            {
                var scopePropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Scope);
                var scopePropertyValue = new StringToken(null, null, null, scope);
                step.Add(scopePropertyName, scopePropertyValue);
                context.Memory.AddBytes(scopePropertyName);
                context.Memory.AddBytes(scopePropertyValue);
            }
        }

        // Generate reference names (only once all explicit IDs are known, so
        // generated names cannot collide with later explicit ones)
        if (needsReferenceName.Count > 0 && context.Errors.Count == 0)
        {
            foreach (var step in needsReferenceName)
            {
                // Get the template path
                var templatePath = default(String);
                foreach (var stepProperty in step)
                {
                    var stepPropertyKey = stepProperty.Key.AssertString("step property name");
                    switch (stepPropertyKey.Value)
                    {
                        case PipelineTemplateConstants.Template:
                            var templateStringToken = stepProperty.Value.AssertString("step template path");
                            templatePath = templateStringToken.Value;
                            break;
                    }
                }

                // Generate reference name
                if (!String.IsNullOrEmpty(templatePath))
                {
                    nameBuilder.AppendSegment(templatePath);
                    var generatedIdPropertyName = new StringToken(null, null, null, PipelineTemplateConstants.GeneratedId);
                    var generatedIdPropertyValue = new StringToken(null, null, null, nameBuilder.Build());
                    step.Add(generatedIdPropertyName, generatedIdPropertyValue);
                    context.Memory.AddBytes(generatedIdPropertyName);
                    context.Memory.AddBytes(generatedIdPropertyValue);
                }
            }
        }
    }

    /// <summary>
    /// Cache of file content
    /// </summary>
    private readonly Dictionary<String, String> m_cache = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

    private readonly IFileProvider m_fileProvider;

    private readonly ParseOptions m_parseOptions;

    /// <summary>
    /// Tracks unique file references
    /// </summary>
    private readonly HashSet<String> m_referencedFiles = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
}
}

View File

@@ -1,51 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Resolves package metadata for a package type on demand (used by
/// <see cref="PackageStore"/> as a fallback when a type is not preloaded).
/// </summary>
public interface IPackageResolver
{
    /// <summary>
    /// Gets all known packages of the specified type.
    /// </summary>
    IList<PackageMetadata> GetPackages(String packageType);
}
/// <summary>
/// An in-memory store of package metadata keyed by package type
/// (case-insensitive), with an optional resolver used to lazily populate
/// package types that were not preloaded.
/// </summary>
public class PackageStore : IPackageStore
{
    public PackageStore(params PackageMetadata[] packages)
        : this(packages, null)
    {
    }

    public PackageStore(
        IEnumerable<PackageMetadata> packages = null,
        IPackageResolver resolver = null)
    {
        this.Resolver = resolver;
        m_packages = packages?.GroupBy(x => x.Type).ToDictionary(x => x.Key, x => x.ToList(), StringComparer.OrdinalIgnoreCase) ??
            new Dictionary<String, List<PackageMetadata>>(StringComparer.OrdinalIgnoreCase);
    }

    /// <summary>
    /// Optional fallback used to resolve packages on a cache miss; may be null.
    /// </summary>
    public IPackageResolver Resolver
    {
        get;
    }

    /// <summary>
    /// Gets the latest version available for the given package type, or null
    /// when no packages of that type are known.
    /// </summary>
    public PackageVersion GetLatestVersion(String packageType)
    {
        if (!m_packages.TryGetValue(packageType, out var existingPackages))
        {
            // Cache miss: consult the resolver when one was provided. An empty
            // (or null) result is deliberately not cached, so unresolved types
            // are re-queried on subsequent calls.
            var resolvedPackages = this.Resolver?.GetPackages(packageType);
            if (resolvedPackages?.Count > 0)
            {
                existingPackages = resolvedPackages.ToList();
                m_packages[packageType] = existingPackages;
            }
        }

        return existingPackages?.OrderByDescending(x => x.Version).Select(x => x.Version).FirstOrDefault();
    }

    // Fix: marked readonly — the reference is only assigned in the constructor;
    // the dictionary contents are still mutated to insert resolved entries.
    private readonly Dictionary<String, List<PackageMetadata>> m_packages;
}
}

View File

@@ -1,315 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Runtime.Serialization;
using GitHub.DistributedTask.Pipelines.Runtime;
using GitHub.DistributedTask.WebApi;
using Newtonsoft.Json;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Controls how a phase fans out into jobs: either a named configuration
/// matrix (one job per matrix entry) or slicing with a maximum concurrency
/// (one job per slice). When neither produces any jobs, a single default job
/// is generated.
/// </summary>
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public class ParallelExecutionOptions
{
    public ParallelExecutionOptions()
    {
    }

    private ParallelExecutionOptions(ParallelExecutionOptions optionsToCopy)
    {
        this.Matrix = optionsToCopy.Matrix;
        this.MaxConcurrency = optionsToCopy.MaxConcurrency;
    }

    // Maps configuration name -> (variable name -> value). Each entry becomes
    // one job whose user variables are the entry's dictionary.
    [DataMember(EmitDefaultValue = false)]
    [JsonConverter(typeof(ExpressionValueJsonConverter<IDictionary<String, IDictionary<String, String>>>))]
    public ExpressionValue<IDictionary<String, IDictionary<String, String>>> Matrix
    {
        get;
        set;
    }

    // Maximum number of jobs to run concurrently; 0 or less means unlimited.
    [DataMember(EmitDefaultValue = false)]
    [JsonConverter(typeof(ExpressionValueJsonConverter<Int32>))]
    public ExpressionValue<Int32> MaxConcurrency
    {
        get;
        set;
    }

    public ParallelExecutionOptions Clone()
    {
        return new ParallelExecutionOptions(this);
    }

    /// <summary>
    /// Creates the execution context for a single named job/attempt by running
    /// the normal expansion with a filter restricted to that job.
    /// </summary>
    internal JobExecutionContext CreateJobContext(
        PhaseExecutionContext context,
        String jobName,
        Int32 attempt,
        ExpressionValue<String> container,
        IDictionary<String, ExpressionValue<String>> sidecarContainers,
        Boolean continueOnError,
        Int32 timeoutInMinutes,
        Int32 cancelTimeoutInMinutes,
        IJobFactory jobFactory)
    {
        // perform regular expansion with a filter
        var options = new JobExpansionOptions(jobName, attempt);
        return GenerateJobContexts(
            context,
            container,
            sidecarContainers,
            continueOnError,
            timeoutInMinutes,
            cancelTimeoutInMinutes,
            jobFactory,
            options)
            .FirstOrDefault();
    }

    /// <summary>
    /// Expands the phase into its full set of jobs and computes the effective
    /// max concurrency for the result.
    /// </summary>
    internal ExpandPhaseResult Expand(
        PhaseExecutionContext context,
        ExpressionValue<String> container,
        IDictionary<String, ExpressionValue<String>> sidecarContainers,
        Boolean continueOnError,
        Int32 timeoutInMinutes,
        Int32 cancelTimeoutInMinutes,
        IJobFactory jobFactory,
        JobExpansionOptions options)
    {
        var jobContexts = GenerateJobContexts(
            context,
            container,
            sidecarContainers,
            continueOnError,
            timeoutInMinutes,
            cancelTimeoutInMinutes,
            jobFactory,
            options);

        var result = new ExpandPhaseResult();
        foreach (var c in jobContexts)
        {
            result.Jobs.Add(c.Job);
        }

        // parse MaxConcurrency request
        var numberOfJobs = jobContexts.Count;
        var userProvidedValue = context.Evaluate(
            name: nameof(MaxConcurrency),
            expression: this.MaxConcurrency,
            defaultValue: 0).Value;

        // setting max to 0 or less is shorthand for "unlimited"
        if (userProvidedValue <= 0)
        {
            userProvidedValue = numberOfJobs;
        }

        result.MaxConcurrency = userProvidedValue;
        return result;
    }

    /// <summary>
    /// Generates one job execution context per matrix configuration or slice
    /// (or a single default job), honoring the optional expansion filter and
    /// the max-job-expansion limit.
    /// </summary>
    internal IList<JobExecutionContext> GenerateJobContexts(
        PhaseExecutionContext context,
        ExpressionValue<String> container,
        IDictionary<String, ExpressionValue<String>> sidecarContainers,
        Boolean continueOnError,
        Int32 timeoutInMinutes,
        Int32 cancelTimeoutInMinutes,
        IJobFactory jobFactory,
        JobExpansionOptions options)
    {
        // We don't want job variables to be set into the phase context so we create a child context for each unique configuration
        var jobContexts = new List<JobExecutionContext>();

        // Local helper: builds one job context and appends it to jobContexts.
        void GenerateContext(
            String displayName,
            String configuration,
            IDictionary<String, String> configurationVariables = null,
            String parallelExecutionType = null,
            Int32 positionInPhase = 1,
            Int32 totalJobsInPhase = 1)
        {
            // configurations should (eventually) follow configuration naming conventions
            if (String.IsNullOrEmpty(configuration))
            {
                configuration = PipelineConstants.DefaultJobName;
            }

            // Determine attempt number.
            // if we have a custom value, it wins.
            // if we have previously attempted this configuration,
            // the new attempt number is one greater than the previous.
            // 1 is the minimum attempt number
            var attemptNumber = options?.GetAttemptNumber(configuration) ?? -1;
            if (attemptNumber < 1)
            {
                var previousAttempt = context.PreviousAttempt;
                if (previousAttempt != null)
                {
                    var jobInstance = context.PreviousAttempt?.Jobs.FirstOrDefault(x => x.Job.Name.Equals(configuration, StringComparison.OrdinalIgnoreCase));
                    if (jobInstance != null)
                    {
                        attemptNumber = jobInstance.Job.Attempt + 1;
                    }
                }
            }

            if (attemptNumber < 1)
            {
                attemptNumber = 1;
            }

            var jobContext = context.CreateJobContext(
                name: configuration,
                attempt: attemptNumber,
                positionInPhase,
                totalJobsInPhase);

            // add parallel execution type
            if (parallelExecutionType != null)
            {
                jobContext.SetSystemVariables(new List<Variable>
                {
                    new Variable
                    {
                        Name = WellKnownDistributedTaskVariables.ParallelExecutionType,
                        Value = parallelExecutionType
                    }
                });
            }

            if (configurationVariables != null)
            {
                jobContext.SetUserVariables(configurationVariables);
            }

            // create job model from factory
            jobContext.Job.Definition = jobFactory.CreateJob(
                jobContext,
                container,
                sidecarContainers,
                continueOnError,
                timeoutInMinutes,
                cancelTimeoutInMinutes,
                displayName);

            jobContexts.Add(jobContext);
            if (jobContexts.Count > context.ExecutionOptions.MaxJobExpansion)
            {
                // Note: this is a little weird: it might be that the max concurrency is greater than the max expansion,
                // but we only throw if we actually try to generate more jobs than the max expansion.
                throw new MaxJobExpansionException(PipelineStrings.PhaseJobSlicingExpansionExceedLimit(jobContexts.Count.ToString(), context.ExecutionOptions.MaxJobExpansion));
            }
        }

        if (this.Matrix != null)
        {
            var matrixValue = context.Evaluate(nameof(Matrix), this.Matrix, null, traceDefault: false).Value;
            var numberOfConfigurations = matrixValue?.Count ?? 0;
            if (numberOfConfigurations > 0)
            {
                var positionInPhase = 1;
                foreach (var pair in matrixValue)
                {
                    // user-provided configuration key
                    var configuration = pair.Key;

                    var refName = configuration;
                    if (!PipelineUtilities.IsLegalNodeName(refName))
                    {
                        var legalNodeName = PipelineConstants.DefaultJobDisplayName + positionInPhase.ToString();
                        context.Trace?.Info($"\"{refName}\" is not a legal node name; node will be named \"{legalNodeName}\".");
                        if (context.ExecutionOptions.EnforceLegalNodeNames)
                        {
                            refName = legalNodeName;
                        }
                    }

                    if (options == null || options.IsIncluded(refName))
                    {
                        GenerateContext(
                            displayName: Phase.GenerateDisplayName(context.Phase.Definition, configuration),
                            configuration: refName,
                            configurationVariables: pair.Value,
                            parallelExecutionType: "MultiConfiguration",
                            positionInPhase: positionInPhase,
                            totalJobsInPhase: numberOfConfigurations);
                    }

                    ++positionInPhase;
                }
            }
        }
        else if (this.MaxConcurrency is var maxConcurrencyPipelineValue && maxConcurrencyPipelineValue != null)
        {
            // NOTE(review): traces under the name "maxConcurrencyPipelineValue",
            // unlike Expand() above which uses nameof(MaxConcurrency) — confirm
            // whether this inconsistency is intentional.
            var maxConcurrency = context.Evaluate(nameof(maxConcurrencyPipelineValue), maxConcurrencyPipelineValue, 1).Value;

            //If the value of context.ExecutionOptions.MaxParallelism is set, we will enforce MaxConcurrency value to be not more than context.ExecutionOptions.MaxParallelism.
            //context.ExecutionOptions.MaxParallelism is currently set if the current context is hosted and public, especially to avoid abuse of services.
            if (maxConcurrency > context.ExecutionOptions.MaxParallelism)
            {
                maxConcurrency = context.ExecutionOptions.MaxParallelism.Value;
            }

            if (maxConcurrency > 1)
            {
                if (options == null || options.Configurations == null || options.Configurations.Count == 0)
                {
                    // generate all slices
                    for (var positionInPhase = 1; positionInPhase <= maxConcurrency; ++positionInPhase)
                    {
                        // NOTE: for historical reasons, the reference name of a slice is "Job" plus the slice number: "Job1"
                        var positionInPhaseString = positionInPhase.ToString();
                        GenerateContext(
                            displayName: Phase.GenerateDisplayName(context.Phase.Definition, positionInPhaseString),
                            configuration: PipelineConstants.DefaultJobDisplayName + positionInPhaseString,
                            configurationVariables: null,
                            parallelExecutionType: "MultiMachine",
                            positionInPhase: positionInPhase,
                            totalJobsInPhase: maxConcurrency);
                    }
                }
                else
                {
                    // generate only the requested slices
                    foreach (var configuration in options.Configurations.Keys)
                    {
                        // determine which slices are required by parsing the configuration name (see generation code above)
                        var prefix = PipelineConstants.DefaultJobDisplayName;
                        if (!configuration.StartsWith(prefix, StringComparison.OrdinalIgnoreCase)
                            || !int.TryParse(configuration.Substring(prefix.Length), out var positionInPhase))
                            throw new PipelineValidationException(PipelineStrings.PipelineNotValid());

                        GenerateContext(
                            displayName: Phase.GenerateDisplayName(context.Phase.Definition, positionInPhase.ToString()),
                            configuration: configuration,
                            configurationVariables: null,
                            parallelExecutionType: "MultiMachine",
                            positionInPhase: positionInPhase,
                            totalJobsInPhase: maxConcurrency);
                    }
                }
            }
        }

        // if no contexts are produced otherwise, create a default context.
        if (jobContexts.Count == 0)
        {
            var configuration = PipelineConstants.DefaultJobName;
            if (options == null || options.IsIncluded(configuration))
            {
                // the default display name is just the JobFactory display name
                GenerateContext(
                    displayName: Phase.GenerateDisplayName(context.Phase.Definition),
                    configuration: configuration);
            }
        }

        return jobContexts;
    }
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,22 +0,0 @@
using System;
using System.ComponentModel;
using GitHub.DistributedTask.Pipelines.Runtime;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// A condition expression attached to a phase, evaluated against a phase
/// execution context with trace capture.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class PhaseCondition : GraphCondition<PhaseInstance>
{
    public PhaseCondition(String condition)
        : base(condition)
    {
    }

    /// <summary>
    /// Evaluates the parsed condition, returning its truthiness together with
    /// the captured evaluation trace.
    /// </summary>
    public ConditionResult Evaluate(PhaseExecutionContext context)
    {
        var trace = new ConditionTraceWriter();
        var outcome = m_parsedCondition.Evaluate(trace, context.SecretMasker, context, context.ExpressionOptions);

        var result = new ConditionResult();
        result.Value = outcome.IsTruthy;
        result.Trace = trace.Trace;
        return result;
    }
}
}

View File

@@ -1,56 +0,0 @@
using System;
using System.ComponentModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
namespace GitHub.DistributedTask.Pipelines
{
/// <summary>
/// Describes a dependency of one phase upon an event raised by another scope.
/// </summary>
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class PhaseDependency
{
    [JsonConstructor]
    public PhaseDependency()
    {
    }

    private PhaseDependency(PhaseDependency dependencyToCopy)
    {
        Scope = dependencyToCopy.Scope;
        Event = dependencyToCopy.Event;
    }

    /// <summary>
    /// The name of the scope this dependency refers to.
    /// </summary>
    [DataMember]
    public String Scope
    {
        get;
        set;
    }

    /// <summary>
    /// The event on the scope which satisfies this dependency.
    /// </summary>
    [DataMember]
    public String Event
    {
        get;
        set;
    }

    /// <summary>
    /// Implicitly converts a <c>Phase</c> to a <c>PhaseDependency</c> to enable easier modeling of graphs.
    /// </summary>
    /// <param name="dependency">The phase which should be converted to a dependency</param>
    public static implicit operator PhaseDependency(Phase dependency) => PhaseCompleted(dependency.Name);

    /// <summary>
    /// Creates a dependency which is satisfied when the named phase completes.
    /// </summary>
    public static PhaseDependency PhaseCompleted(String name) => new PhaseDependency { Scope = name, Event = "Completed" };

    /// <summary>
    /// Creates a copy of this dependency.
    /// </summary>
    internal PhaseDependency Clone() => new PhaseDependency(this);
}
}

Some files were not shown because too many files have changed in this diff Show More