delete unused code. (#218)

This commit is contained in:
Tingluo Huang
2019-12-16 17:05:26 -05:00
committed by GitHub
parent c3c66bb14a
commit d0a4a41a63
582 changed files with 155 additions and 66274 deletions

View File

@@ -1,24 +0,0 @@
using System;
using System.ComponentModel;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.Pipelines.Runtime;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
    /// <summary>
    /// Named-value node used when evaluating graph-node conditions
    /// </summary>
    [EditorBrowsable(EditorBrowsableState.Never)]
    internal sealed class GraphConditionNamedValue<TInstance> : NamedValue where TInstance : IGraphNodeInstance
    {
        /// <summary>
        /// Resolves this named value by looking up <c>Name</c> within the graph
        /// execution data. Returns null when no value is registered under the name.
        /// </summary>
        protected override Object EvaluateCore(
            EvaluationContext context,
            out ResultMemory resultMemory)
        {
            resultMemory = null;

            // Fail with a clear message instead of a NullReferenceException when
            // the evaluation state is not the expected graph execution context.
            if (!(context.State is GraphExecutionContext<TInstance> graphContext))
            {
                throw new InvalidOperationException("Expected the evaluation state to be a graph execution context");
            }

            graphContext.Data.TryGetValue(Name, out var result);
            return result;
        }
    }
}

View File

@@ -1,13 +0,0 @@
using System;
using System.ComponentModel;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
    /// <summary>
    /// Abstraction over file access used while loading pipeline templates.
    /// </summary>
    [EditorBrowsable(EditorBrowsableState.Never)]
    public interface IFileProvider
    {
        /// <summary>Returns the content of the file at the given path.</summary>
        String GetFileContent(String path);

        /// <summary>
        /// Resolves a path — presumably relative to <paramref name="defaultRoot"/>
        /// when not already absolute; confirm against implementations.
        /// </summary>
        String ResolvePath(String defaultRoot, String path);
    }
}

View File

@@ -1,59 +0,0 @@
using System;
using System.Collections.Generic;
using System.Globalization;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
    /// <summary>
    /// Accumulates display-name segments (e.g. matrix values) and combines them
    /// with a job-factory display name, truncating the result to 100 characters.
    /// When no factory display name is supplied the builder is inert and
    /// <see cref="Build"/> returns null.
    /// </summary>
    internal sealed class JobDisplayNameBuilder
    {
        private readonly String m_jobFactoryDisplayName;
        private readonly List<String> m_segments;

        public JobDisplayNameBuilder(String jobFactoryDisplayName)
        {
            if (String.IsNullOrEmpty(jobFactoryDisplayName))
            {
                return; // inert instance
            }

            m_jobFactoryDisplayName = jobFactoryDisplayName;
            m_segments = new List<String>();
        }

        /// <summary>
        /// Records one segment; empty segments and inert builders are ignored.
        /// </summary>
        public void AppendSegment(String value)
        {
            if (!String.IsNullOrEmpty(value) && m_segments != null)
            {
                m_segments.Add(value);
            }
        }

        /// <summary>
        /// Produces "factory (seg1, seg2)" — or just the factory name when no
        /// segments were recorded — truncated to at most 100 characters, and
        /// resets the recorded segments.
        /// </summary>
        public String Build()
        {
            if (String.IsNullOrEmpty(m_jobFactoryDisplayName))
            {
                return null;
            }

            var displayName = m_segments.Count == 0
                ? m_jobFactoryDisplayName
                : $"{m_jobFactoryDisplayName} ({String.Join(", ", m_segments)})";

            const Int32 maxLength = 100;
            if (displayName.Length > maxLength)
            {
                // Keep the total length at exactly maxLength including the ellipsis
                displayName = displayName.Substring(0, maxLength - 3) + "...";
            }

            m_segments.Clear();
            return displayName;
        }
    }
}

View File

@@ -1,445 +0,0 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
    // Expands a strategy "matrix" (named vectors plus include/exclude rules)
    // into the concrete set of job configurations via a cross product.
    internal sealed class MatrixBuilder
    {
        internal MatrixBuilder(
            TemplateContext context,
            String jobFactoryDisplayName)
        {
            m_context = context;
            m_jobFactoryDisplayName = jobFactoryDisplayName;
        }

        // Registers one named matrix vector; the sequence is converted to context data.
        internal void AddVector(
            String name,
            SequenceToken vector)
        {
            m_vectors.Add(name, vector.ToContextData());
        }

        internal DictionaryContextData Vectors => m_vectors;

        // Sets the "exclude" rules (filters applied after the cross product).
        internal void Exclude(SequenceToken exclude)
        {
            m_excludeSequence = exclude;
        }

        // Sets the "include" rules (extra values merged into matching entries).
        internal void Include(SequenceToken include)
        {
            m_includeSequence = include;
        }

        // Lazily yields one StrategyConfiguration per cross-product entry that
        // is not excluded. Yields nothing when no vectors were registered.
        internal IEnumerable<StrategyConfiguration> Build()
        {
            if (m_vectors.Count > 0)
            {
                // Parse includes/excludes
                var include = new MatrixInclude(m_context, m_vectors, m_includeSequence);
                var exclude = new MatrixExclude(m_context, m_vectors, m_excludeSequence);

                // Calculate the cross product size
                var productSize = 1;
                foreach (var vectorPair in m_vectors)
                {
                    checked
                    {
                        var vector = vectorPair.Value.AssertArray("vector");
                        productSize *= vector.Count;
                    }
                }

                var nameBuilder = new ReferenceNameBuilder();
                var displayNameBuilder = new JobDisplayNameBuilder(m_jobFactoryDisplayName);

                // Cross product
                for (var productIndex = 0; productIndex < productSize; productIndex++)
                {
                    // Matrix
                    var matrix = new DictionaryContextData();
                    var blockSize = productSize;
                    foreach (var vectorPair in m_vectors)
                    {
                        var vectorName = vectorPair.Key;
                        var vector = vectorPair.Value.AssertArray("vector");
                        // Mixed-radix decoding: each vector selects one of its
                        // values based on the current product index
                        blockSize = blockSize / vector.Count;
                        var vectorIndex = (productIndex / blockSize) % vector.Count;
                        matrix.Add(vectorName, vector[vectorIndex]);
                    }

                    // Exclude
                    if (exclude.Match(matrix))
                    {
                        continue;
                    }

                    // New configuration
                    var configuration = new StrategyConfiguration();
                    m_context.Memory.AddBytes(TemplateMemory.MinObjectSize);

                    // Gather segments for name and display name
                    foreach (var matrixData in matrix.Traverse(omitKeys: true))
                    {
                        var segment = default(String);
                        switch (matrixData?.Type)
                        {
                            // Only scalar values contribute a segment
                            case PipelineContextDataType.Boolean:
                            case PipelineContextDataType.Number:
                            case PipelineContextDataType.String:
                                segment = matrixData.ToString();
                                break;
                        }

                        if (!String.IsNullOrEmpty(segment))
                        {
                            // Name segment
                            nameBuilder.AppendSegment(segment);

                            // Display name segment
                            displayNameBuilder.AppendSegment(segment);
                        }
                    }

                    // Name
                    configuration.Name = nameBuilder.Build();
                    m_context.Memory.AddBytes(configuration.Name);

                    // Display name
                    configuration.DisplayName = displayNameBuilder.Build();
                    m_context.Memory.AddBytes(configuration.DisplayName);

                    // Include
                    if (include.Match(matrix, out var extra))
                    {
                        matrix.Add(extra);
                    }

                    // Matrix context
                    configuration.ContextData.Add(PipelineTemplateConstants.Matrix, matrix);
                    m_context.Memory.AddBytes(PipelineTemplateConstants.Matrix);
                    m_context.Memory.AddBytes(matrix, traverse: true);

                    // Add configuration
                    yield return configuration;
                }
            }
        }

        // Parses the include sequence into filters; each filter carries the
        // extra values to merge into matching matrix entries.
        private sealed class MatrixInclude
        {
            public MatrixInclude(
                TemplateContext context,
                DictionaryContextData vectors,
                SequenceToken includeSequence)
            {
                // Convert to excludes sets
                if (includeSequence?.Count > 0)
                {
                    foreach (var includeItem in includeSequence)
                    {
                        var includeMapping = includeItem.AssertMapping("matrix includes item");

                        // Distinguish filters versus extra
                        var filter = new MappingToken(null, null, null);
                        var extra = new DictionaryContextData();
                        foreach (var includePair in includeMapping)
                        {
                            var includeKeyLiteral = includePair.Key.AssertString("matrix include item key");
                            // Keys matching a vector name act as filters; the rest are extra values
                            if (vectors.ContainsKey(includeKeyLiteral.Value))
                            {
                                filter.Add(includeKeyLiteral, includePair.Value);
                            }
                            else
                            {
                                extra.Add(includeKeyLiteral.Value, includePair.Value.ToContextData());
                            }
                        }

                        // At least one filter
                        if (filter.Count == 0)
                        {
                            context.Error(includeMapping, $"Matrix include mapping does not contain any filters");
                            continue;
                        }

                        // At least one extra
                        if (extra.Count == 0)
                        {
                            context.Error(includeMapping, $"Matrix include mapping does not contain any extra values to include");
                            continue;
                        }

                        // Add filter
                        m_filters.Add(new MatrixIncludeFilter(filter, extra));
                    }
                }
            }

            // Merges the extra values of every matching filter (later filters
            // overwrite earlier ones key-by-key); returns true when any matched.
            public Boolean Match(
                DictionaryContextData matrix,
                out DictionaryContextData extra)
            {
                extra = default(DictionaryContextData);
                foreach (var filter in m_filters)
                {
                    if (filter.Match(matrix, out var items))
                    {
                        if (extra == null)
                        {
                            extra = new DictionaryContextData();
                        }

                        foreach (var pair in items)
                        {
                            extra[pair.Key] = pair.Value;
                        }
                    }
                }

                return extra != null;
            }

            private readonly List<MatrixIncludeFilter> m_filters = new List<MatrixIncludeFilter>();
        }

        private sealed class MatrixIncludeFilter : MatrixFilter
        {
            public MatrixIncludeFilter(
                MappingToken filter,
                DictionaryContextData extra)
                : base(filter)
            {
                m_extra = extra;
            }

            // On a match, surfaces the extra values associated with this filter.
            public Boolean Match(
                DictionaryContextData matrix,
                out DictionaryContextData extra)
            {
                if (base.Match(matrix))
                {
                    extra = m_extra;
                    return true;
                }

                extra = null;
                return false;
            }

            private readonly DictionaryContextData m_extra;
        }

        // Parses the exclude sequence into filters and validates their keys
        // against the declared vectors.
        private sealed class MatrixExclude
        {
            public MatrixExclude(
                TemplateContext context,
                DictionaryContextData vectors,
                SequenceToken excludeSequence)
            {
                // Convert to excludes sets
                if (excludeSequence?.Count > 0)
                {
                    foreach (var excludeItem in excludeSequence)
                    {
                        var excludeMapping = excludeItem.AssertMapping("matrix excludes item");

                        // Check empty
                        if (excludeMapping.Count == 0)
                        {
                            context.Error(excludeMapping, $"Matrix exclude filter must not be empty");
                            continue;
                        }

                        // Validate first-level keys
                        foreach (var excludePair in excludeMapping)
                        {
                            var excludeKey = excludePair.Key.AssertString("matrix excludes item key");
                            if (!vectors.ContainsKey(excludeKey.Value))
                            {
                                context.Error(excludeKey, $"Matrix exclude key '{excludeKey.Value}' does not match any key within the matrix");
                                continue;
                            }
                        }

                        // Add filter
                        m_filters.Add(new MatrixExcludeFilter(excludeMapping));
                    }
                }
            }

            // True when any exclude filter matches the candidate matrix entry.
            public Boolean Match(DictionaryContextData matrix)
            {
                foreach (var filter in m_filters)
                {
                    if (filter.Match(matrix))
                    {
                        return true;
                    }
                }

                return false;
            }

            private readonly List<MatrixExcludeFilter> m_filters = new List<MatrixExcludeFilter>();
        }

        private sealed class MatrixExcludeFilter : MatrixFilter
        {
            public MatrixExcludeFilter(MappingToken filter)
                : base(filter)
            {
            }

            // Exposes the protected base match publicly without changing it.
            public new Boolean Match(DictionaryContextData matrix)
            {
                return base.Match(matrix);
            }
        }

        // Base filter: flattens a (possibly nested) filter mapping into one
        // equality expression per leaf value, e.g. matrix['os'] == 'linux'.
        private abstract class MatrixFilter
        {
            protected MatrixFilter(MappingToken matrixFilter)
            {
                // Iterative depth-first walk over nested mappings using an
                // explicit parent-linked state instead of recursion
                var state = new MappingState(null, matrixFilter);
                while (state != null)
                {
                    if (state.MoveNext())
                    {
                        var value = state.Mapping[state.Index].Value;
                        if (value is LiteralToken literal)
                        {
                            AddExpression(state, literal);
                        }
                        else
                        {
                            // Descend into a nested mapping
                            var mapping = state.Mapping[state.Index].Value.AssertMapping("matrix filter");
                            state = new MappingState(state, mapping);
                        }
                    }
                    else
                    {
                        state = state.Parent;
                    }
                }
            }

            // True when every leaf equality expression evaluates truthy against
            // the matrix (the matrix dictionary is passed as evaluation state).
            protected Boolean Match(DictionaryContextData matrix)
            {
                if (matrix.Count == 0)
                {
                    throw new InvalidOperationException("Matrix filter cannot be empty");
                }

                foreach (var expression in m_expressions)
                {
                    var result = expression.Evaluate(null, null, matrix, null);
                    if (result.IsFalsy)
                    {
                        return false;
                    }
                }

                return true;
            }

            // Builds and parses "<path> == <literal>" for one leaf value.
            private void AddExpression(
                MappingState state,
                LiteralToken literal)
            {
                // Convert the literal into its expression-language representation
                var expressionLiteral = default(String);
                switch (literal.Type)
                {
                    case TokenType.Null:
                        expressionLiteral = ExpressionConstants.Null;
                        break;

                    case TokenType.Boolean:
                        var booleanToken = literal as BooleanToken;
                        expressionLiteral = booleanToken.Value ? ExpressionConstants.True : ExpressionConstants.False;
                        break;

                    case TokenType.Number:
                        var numberToken = literal as NumberToken;
                        expressionLiteral = String.Format(CultureInfo.InvariantCulture, ExpressionConstants.NumberFormat, numberToken.Value);
                        break;

                    case TokenType.String:
                        var stringToken = literal as StringToken;
                        expressionLiteral = $"'{ExpressionUtility.StringEscape(stringToken.Value)}'";
                        break;

                    default:
                        throw new NotSupportedException($"Unexpected literal type '{literal.Type}'");
                }

                var str = $"{state.Path} == {expressionLiteral}";
                var parser = new ExpressionParser();
                var expression = parser.CreateTree(str, null, s_matrixFilterNamedValues, null);
                m_expressions.Add(expression);
            }

            private static readonly INamedValueInfo[] s_matrixFilterNamedValues = new INamedValueInfo[]
            {
                new NamedValueInfo<MatrixNamedValue>(PipelineTemplateConstants.Matrix),
            };

            private readonly List<IExpressionNode> m_expressions = new List<IExpressionNode>();
        }

        // One frame of the iterative mapping walk: tracks the mapping, the
        // current index, and the indexer path built so far.
        private sealed class MappingState
        {
            public MappingState(
                MappingState parent,
                MappingToken mapping)
            {
                Parent = parent;
                Mapping = mapping;
                Index = -1;
            }

            // Advances to the next key and recomputes the indexer path,
            // e.g. matrix['os']['version'].
            public Boolean MoveNext()
            {
                if (++Index < Mapping.Count)
                {
                    var keyLiteral = Mapping[Index].Key.AssertString("matrix filter key");
                    var parentPath = Parent?.Path ?? PipelineTemplateConstants.Matrix;
                    Path = $"{parentPath}['{ExpressionUtility.StringEscape(keyLiteral.Value)}']";
                    return true;
                }
                else
                {
                    return false;
                }
            }

            public MappingState Parent;
            public MappingToken Mapping;
            public Int32 Index;
            public String Path;
        }

        // Resolves the "matrix" named value to the evaluation state itself.
        private sealed class MatrixNamedValue : NamedValue
        {
            protected override Object EvaluateCore(
                EvaluationContext context,
                out ResultMemory resultMemory)
            {
                resultMemory = null;
                return context.State;
            }
        }

        private readonly TemplateContext m_context;
        private readonly String m_jobFactoryDisplayName;
        private readonly DictionaryContextData m_vectors = new DictionaryContextData();
        private SequenceToken m_excludeSequence;
        private SequenceToken m_includeSequence;
    }
}

View File

@@ -1,45 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Reflection;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
    /// <summary>
    /// Limits applied while parsing a pipeline. Fixed limits are exposed as
    /// read-only properties; tunable limits carry settable defaults.
    /// </summary>
    [EditorBrowsable(EditorBrowsableState.Never)]
    public sealed class ParseOptions
    {
        public ParseOptions()
        {
        }

        /// <summary>
        /// Copy constructor. Only the tunable limits are copied; the fixed
        /// limits are constants and need no copying.
        /// </summary>
        internal ParseOptions(ParseOptions source)
        {
            MaxFiles = source.MaxFiles;
            MaxFileSize = source.MaxFileSize;
            MaxResultSize = source.MaxResultSize;
        }

        /// <summary>Gets the maximum depth limit (fixed at 50).</summary>
        public Int32 MaxDepth => 50;

        /// <summary>
        /// Gets the maximum error message length before the message will be truncated.
        /// </summary>
        public Int32 MaxErrorMessageLength => 500;

        /// <summary>
        /// Gets the maximum number of errors that can be recorded when parsing a pipeline.
        /// </summary>
        public Int32 MaxErrors => 10;

        /// <summary>
        /// Gets or sets the maximum number of files that can be loaded when parsing a pipeline. Zero or less is treated as infinite.
        /// </summary>
        public Int32 MaxFiles { get; set; } = 50;

        /// <summary>Gets or sets the maximum size of a single file in bytes (default 1 MB).</summary>
        public Int32 MaxFileSize { get; set; } = 1024 * 1024;

        /// <summary>Gets the maximum number of parse events (1 million).</summary>
        public Int32 MaxParseEvents => 1000000;

        /// <summary>Gets or sets the maximum result size in bytes (default 10 MB).</summary>
        public Int32 MaxResultSize { get; set; } = 10 * 1024 * 1024;
    }
}

View File

@@ -1,30 +0,0 @@
using System;
using System.IO;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
    /// <summary>
    /// Holds the outcome of a template parse: the context (including any
    /// recorded errors) and the resulting token, with YAML round-tripping.
    /// </summary>
    internal sealed class ParseResult
    {
        public TemplateContext Context { get; set; }

        public TemplateToken Value { get; set; }

        /// <summary>
        /// Serializes the parsed token back into a YAML string. Returns null
        /// when no value was produced.
        /// </summary>
        public String ToYaml()
        {
            if (Value == null)
            {
                return null;
            }

            using (var writer = new StringWriter())
            {
                TemplateWriter.Write(new YamlObjectWriter(writer), Value);
                writer.Flush();
                return writer.ToString();
            }
        }
    }
}

View File

@@ -16,180 +16,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
internal static class PipelineTemplateConverter
{
// Converts the parsed workflow root token into a PipelineTemplate.
// Conversion errors are recorded on the result rather than thrown.
internal static PipelineTemplate ConvertToPipeline(
    TemplateContext context,
    RepositoryResource self,
    TemplateToken pipeline)
{
    var result = new PipelineTemplate();
    result.Resources.Repositories.Add(self);

    // All jobs live within a single implicit stage
    var defaultStage = new Stage
    {
        Name = PipelineConstants.DefaultJobName,
    };
    result.Stages.Add(defaultStage);

    try
    {
        // Skip conversion entirely when parsing already failed
        if (pipeline == null || context.Errors.Count > 0)
        {
            return result;
        }

        var pipelineMapping = pipeline.AssertMapping("root");
        foreach (var pipelinePair in pipelineMapping)
        {
            var pipelineKey = pipelinePair.Key.AssertString("root key");
            switch (pipelineKey.Value)
            {
                // "on" and "name" are accepted but not converted here
                case PipelineTemplateConstants.On:
                    break;

                case PipelineTemplateConstants.Name:
                    break;

                case PipelineTemplateConstants.Env:
                    result.EnvironmentVariables = pipelinePair.Value;
                    break;

                case PipelineTemplateConstants.Jobs:
                    defaultStage.Phases.AddRange(ConvertToJobFactories(context, result.Resources, pipelinePair.Value));
                    break;

                default:
                    pipelineKey.AssertUnexpectedValue("root key"); // throws
                    break;
            }
        }
    }
    catch (Exception ex)
    {
        context.Errors.Add(ex);
    }
    finally
    {
        // Surface template errors as pipeline validation errors on the result
        if (context.Errors.Count > 0)
        {
            foreach (var error in context.Errors)
            {
                result.Errors.Add(new PipelineValidationError(error.Code, error.Message));
            }
        }
    }

    return result;
}
// Extracts the job display name from its token. Returns null when the value
// is still an unresolved expression and expressions are allowed.
internal static String ConvertToJobDisplayName(
    TemplateContext context,
    TemplateToken displayName,
    Boolean allowExpressions = false)
{
    // Unresolved expressions cannot be converted yet
    if (allowExpressions && displayName is ExpressionToken)
    {
        return null;
    }

    var literal = displayName.AssertString($"job {PipelineTemplateConstants.Name}");
    return literal.Value;
}
// Converts the job "runs-on" value into a PhaseTarget. A plain string is
// recorded as a VM image on the "GitHub Actions" pool; a mapping may carry an
// explicit "pool" entry.
internal static PhaseTarget ConvertToJobTarget(
    TemplateContext context,
    TemplateToken runsOn,
    Boolean allowExpressions = false)
{
    var result = new AgentPoolTarget();

    // Expression
    if (allowExpressions && runsOn is ExpressionToken)
    {
        return result;
    }

    // String
    if (runsOn is StringToken runsOnString)
    {
        result.Pool = new AgentPoolReference { Name = "GitHub Actions" };
        result.AgentSpecification = new JObject
        {
            { PipelineTemplateConstants.VmImage, runsOnString.Value }
        };
    }
    // Mapping
    else
    {
        var runsOnMapping = runsOn.AssertMapping($"job {PipelineTemplateConstants.RunsOn}");
        foreach (var runsOnProperty in runsOnMapping)
        {
            // Expression
            if (allowExpressions && runsOnProperty.Key is ExpressionToken)
            {
                continue;
            }

            // String
            var propertyName = runsOnProperty.Key.AssertString($"job {PipelineTemplateConstants.RunsOn} key");
            switch (propertyName.Value)
            {
                case PipelineTemplateConstants.Pool:
                    // Expression
                    if (allowExpressions && runsOnProperty.Value is ExpressionToken)
                    {
                        continue;
                    }

                    // Literal
                    var pool = runsOnProperty.Value.AssertString($"job {PipelineTemplateConstants.RunsOn} key");
                    result.Pool = new AgentPoolReference { Name = pool.Value };
                    break;

                default:
                    propertyName.AssertUnexpectedValue($"job {PipelineTemplateConstants.RunsOn} key"); // throws
                    break;
            }
        }
    }

    return result;
}
// Converts the job "timeout-minutes" token to an integer. Returns null when
// the value is still an unresolved expression and expressions are allowed.
internal static Int32? ConvertToJobTimeout(
    TemplateContext context,
    TemplateToken token,
    Boolean allowExpressions = false)
{
    if (allowExpressions && token is ExpressionToken)
    {
        return null; // resolved later, after expression expansion
    }

    return (Int32)token.AssertNumber($"job {PipelineTemplateConstants.TimeoutMinutes}").Value;
}
// Converts the job "cancel-timeout-minutes" token to an integer. Returns null
// when the value is still an unresolved expression and expressions are allowed.
internal static Int32? ConvertToJobCancelTimeout(
    TemplateContext context,
    TemplateToken token,
    Boolean allowExpressions = false)
{
    if (allowExpressions && token is ExpressionToken)
    {
        return null; // resolved later, after expression expansion
    }

    return (Int32)token.AssertNumber($"job {PipelineTemplateConstants.CancelTimeoutMinutes}").Value;
}
internal static Boolean? ConvertToStepContinueOnError(
TemplateContext context,
TemplateToken token,
@@ -315,203 +141,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return (Int32)numberToken.Value;
}
// Converts the job "strategy" mapping (fail-fast, max-parallel, matrix) into a
// StrategyResult with one configuration per expanded matrix entry. When
// expressions are permitted and encountered, expansion is deferred and the
// result is returned without configurations.
internal static StrategyResult ConvertToStrategy(
    TemplateContext context,
    TemplateToken token,
    String jobFactoryDisplayName,
    Boolean allowExpressions = false)
{
    var result = new StrategyResult();

    // Expression
    if (allowExpressions && token is ExpressionToken)
    {
        return result;
    }

    var strategyMapping = token.AssertMapping(PipelineTemplateConstants.Strategy);
    var matrixBuilder = default(MatrixBuilder);
    var hasExpressions = false;

    foreach (var strategyPair in strategyMapping)
    {
        // Expression key
        if (allowExpressions && strategyPair.Key is ExpressionToken)
        {
            hasExpressions = true;
            continue;
        }

        // Literal key
        var strategyKey = strategyPair.Key.AssertString("strategy key");
        switch (strategyKey.Value)
        {
            // Fail-Fast
            case PipelineTemplateConstants.FailFast:
                if (allowExpressions && strategyPair.Value is ExpressionToken)
                {
                    hasExpressions = true;
                    continue;
                }

                var failFastBooleanToken = strategyPair.Value.AssertBoolean($"strategy {PipelineTemplateConstants.FailFast}");
                result.FailFast = failFastBooleanToken.Value;
                break;

            // Max-Parallel
            case PipelineTemplateConstants.MaxParallel:
                if (allowExpressions && strategyPair.Value is ExpressionToken)
                {
                    hasExpressions = true;
                    continue;
                }

                var maxParallelNumberToken = strategyPair.Value.AssertNumber($"strategy {PipelineTemplateConstants.MaxParallel}");
                result.MaxParallel = (Int32)maxParallelNumberToken.Value;
                break;

            // Matrix
            case PipelineTemplateConstants.Matrix:

                // Expression
                if (allowExpressions && strategyPair.Value is ExpressionToken)
                {
                    hasExpressions = true;
                    continue;
                }

                var matrix = strategyPair.Value.AssertMapping("matrix");
                // Any nested expression defers expansion of the whole matrix
                hasExpressions = hasExpressions || matrix.Traverse().Any(x => x is ExpressionToken);
                matrixBuilder = new MatrixBuilder(context, jobFactoryDisplayName);
                var hasVector = false;

                foreach (var matrixPair in matrix)
                {
                    // Expression key
                    if (allowExpressions && matrixPair.Key is ExpressionToken)
                    {
                        hasVector = true; // For validation, treat as if a vector is defined
                        continue;
                    }

                    var matrixKey = matrixPair.Key.AssertString("matrix key");
                    switch (matrixKey.Value)
                    {
                        case PipelineTemplateConstants.Include:
                            if (allowExpressions && matrixPair.Value is ExpressionToken)
                            {
                                continue;
                            }

                            var includeSequence = matrixPair.Value.AssertSequence("matrix includes");
                            matrixBuilder.Include(includeSequence);
                            break;

                        case PipelineTemplateConstants.Exclude:
                            if (allowExpressions && matrixPair.Value is ExpressionToken)
                            {
                                continue;
                            }

                            var excludeSequence = matrixPair.Value.AssertSequence("matrix excludes");
                            matrixBuilder.Exclude(excludeSequence);
                            break;

                        // Any other key is a vector definition
                        default:
                            hasVector = true;

                            if (allowExpressions && matrixPair.Value is ExpressionToken)
                            {
                                continue;
                            }

                            var vectorName = matrixKey.Value;
                            var vectorSequence = matrixPair.Value.AssertSequence("matrix vector value");
                            if (vectorSequence.Count == 0)
                            {
                                context.Error(vectorSequence, $"Matrix vector '{vectorName}' does not contain any values");
                            }
                            else
                            {
                                matrixBuilder.AddVector(vectorName, vectorSequence);
                            }
                            break;
                    }
                }

                if (!hasVector)
                {
                    // Fixed grammar in the user-facing message ("must defined" -> "must define")
                    context.Error(matrix, $"Matrix must define at least one vector");
                }
                break;

            default:
                strategyKey.AssertUnexpectedValue("strategy key"); // throws
                break;
        }
    }

    // Defer expansion until expressions have been resolved
    if (hasExpressions)
    {
        return result;
    }

    if (matrixBuilder != null)
    {
        result.Configurations.AddRange(matrixBuilder.Build());
    }

    // Attach the "strategy" context (fail-fast, job-index, job-total,
    // max-parallel) to every expanded configuration
    for (var i = 0; i < result.Configurations.Count; i++)
    {
        var configuration = result.Configurations[i];
        var strategy = new DictionaryContextData()
        {
            {
                "fail-fast",
                new BooleanContextData(result.FailFast)
            },
            {
                "job-index",
                new NumberContextData(i)
            },
            {
                "job-total",
                new NumberContextData(result.Configurations.Count)
            }
        };

        if (result.MaxParallel > 0)
        {
            strategy.Add(
                "max-parallel",
                new NumberContextData(result.MaxParallel)
            );
        }
        else
        {
            // Default max-parallel to the total number of configurations
            strategy.Add(
                "max-parallel",
                new NumberContextData(result.Configurations.Count)
            );
        }

        configuration.ContextData.Add(PipelineTemplateConstants.Strategy, strategy);
        context.Memory.AddBytes(PipelineTemplateConstants.Strategy);
        context.Memory.AddBytes(strategy, traverse: true);

        // Every configuration gets a matrix context entry, even when null
        if (!configuration.ContextData.ContainsKey(PipelineTemplateConstants.Matrix))
        {
            configuration.ContextData.Add(PipelineTemplateConstants.Matrix, null);
            context.Memory.AddBytes(PipelineTemplateConstants.Matrix);
        }
    }

    return result;
}
internal static JobContainer ConvertToJobContainer(
TemplateContext context,
TemplateToken value,
@@ -616,532 +245,5 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result;
}
// Converts the "jobs" mapping into JobFactory phase nodes. Values that may
// contain expressions are validated early where possible and stored as cloned
// tokens for later evaluation.
private static IEnumerable<PhaseNode> ConvertToJobFactories(
    TemplateContext context,
    PipelineResources resources,
    TemplateToken workflow)
{
    var jobsMapping = workflow.AssertMapping(PipelineTemplateConstants.Jobs);
    foreach (var jobsPair in jobsMapping)
    {
        var jobNameToken = jobsPair.Key.AssertString($"{PipelineTemplateConstants.Jobs} key");
        if (!NameValidation.IsValid(jobNameToken.Value, true))
        {
            context.Error(jobNameToken, $"Job name {jobNameToken.Value} is invalid. Names must start with a letter or '_' and contain only alphanumeric characters, '-', or '_'");
        }

        var result = new JobFactory
        {
            Name = jobNameToken.Value
        };

        var jobFactoryDefinition = jobsPair.Value.AssertMapping($"{PipelineTemplateConstants.Jobs} value");
        foreach (var jobFactoryProperty in jobFactoryDefinition)
        {
            var propertyName = jobFactoryProperty.Key.AssertString($"job property name");
            switch (propertyName.Value)
            {
                case PipelineTemplateConstants.ContinueOnError:
                    var continueOnErrorBooleanToken = jobFactoryProperty.Value.AssertBoolean($"job {PipelineTemplateConstants.ContinueOnError}");
                    result.ContinueOnError = continueOnErrorBooleanToken.Value;
                    break;

                case PipelineTemplateConstants.If:
                    var ifCondition = jobFactoryProperty.Value.AssertString($"job {PipelineTemplateConstants.If}");
                    result.Condition = ConvertToIfCondition(context, ifCondition, true, true);
                    break;

                case PipelineTemplateConstants.Name:
                    var displayName = jobFactoryProperty.Value.AssertScalar($"job {PipelineTemplateConstants.Name}");
                    ConvertToJobDisplayName(context, displayName, allowExpressions: true); // Validate early if possible
                    // Literal names resolve now; expressions resolve at run time
                    if (displayName is StringToken)
                    {
                        result.DisplayName = displayName.ToString();
                    }
                    else
                    {
                        result.JobDisplayName = displayName.Clone(true) as ExpressionToken;
                    }
                    break;

                case PipelineTemplateConstants.Needs:
                    // "needs" accepts a single name or a sequence of names
                    if (jobFactoryProperty.Value is StringToken needsLiteral)
                    {
                        result.DependsOn.Add(needsLiteral.Value);
                    }
                    else
                    {
                        var needs = jobFactoryProperty.Value.AssertSequence($"job {PipelineTemplateConstants.Needs}");
                        foreach (var needsItem in needs)
                        {
                            var need = needsItem.AssertString($"job {PipelineTemplateConstants.Needs} item");
                            result.DependsOn.Add(need.Value);
                        }
                    }
                    break;

                case PipelineTemplateConstants.RunsOn:
                    ConvertToJobTarget(context, jobFactoryProperty.Value, allowExpressions: true); // Validate early if possible
                    result.JobTarget = jobFactoryProperty.Value.Clone(true);
                    break;

                case PipelineTemplateConstants.Scopes:
                    foreach (var scope in ConvertToScopes(context, jobFactoryProperty.Value))
                    {
                        result.Scopes.Add(scope);
                    }
                    break;

                case PipelineTemplateConstants.Steps:
                    result.Steps.AddRange(ConvertToSteps(context, jobFactoryProperty.Value));
                    break;

                case PipelineTemplateConstants.Strategy:
                    ConvertToStrategy(context, jobFactoryProperty.Value, null, allowExpressions: true); // Validate early if possible
                    result.Strategy = jobFactoryProperty.Value.Clone(true);
                    break;

                case PipelineTemplateConstants.TimeoutMinutes:
                    ConvertToJobTimeout(context, jobFactoryProperty.Value, allowExpressions: true); // Validate early if possible
                    result.JobTimeout = jobFactoryProperty.Value.Clone(true) as ScalarToken;
                    break;

                case PipelineTemplateConstants.CancelTimeoutMinutes:
                    ConvertToJobCancelTimeout(context, jobFactoryProperty.Value, allowExpressions: true); // Validate early if possible
                    result.JobCancelTimeout = jobFactoryProperty.Value.Clone(true) as ScalarToken;
                    break;

                case PipelineTemplateConstants.Container:
                    ConvertToJobContainer(context, jobFactoryProperty.Value, allowExpressions: true);
                    result.JobContainer = jobFactoryProperty.Value.Clone(true);
                    break;

                case PipelineTemplateConstants.Services:
                    ConvertToJobServiceContainers(context, jobFactoryProperty.Value, allowExpressions: true);
                    result.JobServiceContainers = jobFactoryProperty.Value.Clone(true);
                    break;

                case PipelineTemplateConstants.Env:
                    result.EnvironmentVariables = jobFactoryProperty.Value.Clone(true);
                    break;

                default:
                    propertyName.AssertUnexpectedValue("job key"); // throws
                    break;
            }
        }

        // todo: Move "required" support into schema validation
        if (result.JobTarget == null)
        {
            context.Error(jobFactoryDefinition, $"The '{PipelineTemplateConstants.RunsOn}' property is required");
        }

        if (String.IsNullOrEmpty(result.DisplayName))
        {
            result.DisplayName = result.Name;
        }

        // Scoped jobs (templates) are bracketed by warning steps
        if (result.Scopes.Count > 0)
        {
            result.Steps.Insert(
                0,
                new ActionStep
                {
                    Reference = new ScriptReference(),
                    DisplayName = "WARNING: TEMPLATES ARE HIGHLY EXPERIMENTAL",
                    Inputs = new MappingToken(null, null, null)
                    {
                        {
                            new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Script),
                            new StringToken(null, null, null, "echo WARNING: TEMPLATES ARE HIGHLY EXPERIMENTAL")
                        }
                    }
                });
            result.Steps.Add(
                new ActionStep
                {
                    Reference = new ScriptReference(),
                    DisplayName = "WARNING: TEMPLATES ARE HIGHLY EXPERIMENTAL",
                    Inputs = new MappingToken(null, null, null)
                    {
                        {
                            new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Script),
                            new StringToken(null, null, null, "echo WARNING: TEMPLATES ARE HIGHLY EXPERIMENTAL")
                        }
                    }
                });
        }

        yield return result;
    }
}
// Converts the job "scopes" sequence into ContextScope objects. Unknown
// property keys within a scope are silently ignored.
private static IEnumerable<ContextScope> ConvertToScopes(
    TemplateContext context,
    TemplateToken scopes)
{
    var sequence = scopes.AssertSequence($"job {PipelineTemplateConstants.Scopes}");
    foreach (var item in sequence)
    {
        var mapping = item.AssertMapping($"{PipelineTemplateConstants.Scopes} item");
        var converted = new ContextScope();
        foreach (var property in mapping)
        {
            var key = property.Key.AssertString($"{PipelineTemplateConstants.Scopes} item key");
            if (key.Value == PipelineTemplateConstants.Name)
            {
                converted.Name = property.Value.AssertString($"{PipelineTemplateConstants.Scopes} item {PipelineTemplateConstants.Name}").Value;
            }
            else if (key.Value == PipelineTemplateConstants.Inputs)
            {
                converted.Inputs = property.Value.AssertMapping($"{PipelineTemplateConstants.Scopes} item {PipelineTemplateConstants.Inputs}");
            }
            else if (key.Value == PipelineTemplateConstants.Outputs)
            {
                converted.Outputs = property.Value.AssertMapping($"{PipelineTemplateConstants.Scopes} item {PipelineTemplateConstants.Outputs}");
            }
        }

        yield return converted;
    }
}
// Converts the job "steps" sequence into Step objects, keeping only the
// steps that converted successfully and are enabled.
private static List<Step> ConvertToSteps(
    TemplateContext context,
    TemplateToken steps)
{
    var sequence = steps.AssertSequence($"job {PipelineTemplateConstants.Steps}");
    var converted = new List<Step>();
    foreach (var item in sequence)
    {
        // null means conversion failed; the error is already in context.Errors
        var step = ConvertToStep(context, item);
        if (step != null && step.Enabled)
        {
            converted.Add(step);
        }
    }

    return converted;
}
private static ActionStep ConvertToStep(
TemplateContext context,
TemplateToken stepsItem)
{
var step = stepsItem.AssertMapping($"{PipelineTemplateConstants.Steps} item");
var continueOnError = default(ScalarToken);
var env = default(TemplateToken);
var id = default(StringToken);
var ifCondition = default(String);
var ifToken = default(StringToken);
var name = default(ScalarToken);
var run = default(ScalarToken);
var scope = default(StringToken);
var timeoutMinutes = default(ScalarToken);
var uses = default(StringToken);
var with = default(TemplateToken);
var workingDir = default(ScalarToken);
var path = default(ScalarToken);
var clean = default(ScalarToken);
var fetchDepth = default(ScalarToken);
var lfs = default(ScalarToken);
var submodules = default(ScalarToken);
var shell = default(ScalarToken);
foreach (var stepProperty in step)
{
var propertyName = stepProperty.Key.AssertString($"{PipelineTemplateConstants.Steps} item key");
switch (propertyName.Value)
{
case PipelineTemplateConstants.Clean:
clean = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Clean}");
break;
case PipelineTemplateConstants.ContinueOnError:
ConvertToStepContinueOnError(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
continueOnError = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} {PipelineTemplateConstants.ContinueOnError}");
break;
case PipelineTemplateConstants.Env:
ConvertToStepEnvironment(context, stepProperty.Value, StringComparer.Ordinal, allowExpressions: true); // Validate early if possible
env = stepProperty.Value;
break;
case PipelineTemplateConstants.FetchDepth:
fetchDepth = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.FetchDepth}");
break;
case PipelineTemplateConstants.Id:
id = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Id}");
if (!NameValidation.IsValid(id.Value, true))
{
context.Error(id, $"Step id {id.Value} is invalid. Ids must start with a letter or '_' and contain only alphanumeric characters, '-', or '_'");
}
break;
case PipelineTemplateConstants.If:
ifToken = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.If}");
break;
case PipelineTemplateConstants.Lfs:
lfs = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Lfs}");
break;
case PipelineTemplateConstants.Name:
name = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Name}");
break;
case PipelineTemplateConstants.Path:
path = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Path}");
break;
case PipelineTemplateConstants.Run:
run = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Run}");
break;
case PipelineTemplateConstants.Shell:
shell = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Shell}");
break;
case PipelineTemplateConstants.Scope:
scope = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Scope}");
break;
case PipelineTemplateConstants.Submodules:
submodules = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Submodules}");
break;
case PipelineTemplateConstants.TimeoutMinutes:
ConvertToStepTimeout(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
timeoutMinutes = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.TimeoutMinutes}");
break;
case PipelineTemplateConstants.Uses:
uses = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Uses}");
break;
case PipelineTemplateConstants.With:
ConvertToStepInputs(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
with = stepProperty.Value;
break;
case PipelineTemplateConstants.WorkingDirectory:
workingDir = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.WorkingDirectory}");
break;
default:
propertyName.AssertUnexpectedValue($"{PipelineTemplateConstants.Steps} item key"); // throws
break;
}
}
// Fixup the if-condition
var isDefaultScope = String.IsNullOrEmpty(scope?.Value);
ifCondition = ConvertToIfCondition(context, ifToken, false, isDefaultScope);
if (run != null)
{
var result = new ActionStep
{
ScopeName = scope?.Value,
ContextName = id?.Value,
ContinueOnError = continueOnError?.Clone(true) as ScalarToken,
DisplayNameToken = name?.Clone(true) as ScalarToken,
Condition = ifCondition,
TimeoutInMinutes = timeoutMinutes?.Clone(true) as ScalarToken,
Environment = env?.Clone(true),
Reference = new ScriptReference(),
};
var inputs = new MappingToken(null, null, null);
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Script), run.Clone(true));
if (workingDir != null)
{
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.WorkingDirectory), workingDir.Clone(true));
}
if (shell != null)
{
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Shell), shell.Clone(true));
}
result.Inputs = inputs;
return result;
}
else if (uses != null)
{
var result = new ActionStep
{
ScopeName = scope?.Value,
ContextName = id?.Value,
ContinueOnError = continueOnError?.Clone(true) as ScalarToken,
DisplayNameToken = name?.Clone(true) as ScalarToken,
Condition = ifCondition,
TimeoutInMinutes = timeoutMinutes?.Clone(true) as ScalarToken,
Inputs = with,
Environment = env,
};
if (uses.Value.StartsWith("docker://", StringComparison.Ordinal))
{
var image = uses.Value.Substring("docker://".Length);
result.Reference = new ContainerRegistryReference { Image = image };
}
else if (uses.Value.StartsWith("./") || uses.Value.StartsWith(".\\"))
{
result.Reference = new RepositoryPathReference
{
RepositoryType = PipelineConstants.SelfAlias,
Path = uses.Value
};
}
else
{
var usesSegments = uses.Value.Split('@');
var pathSegments = usesSegments[0].Split(new[] { '/', '\\' }, StringSplitOptions.RemoveEmptyEntries);
var gitRef = usesSegments.Length == 2 ? usesSegments[1] : String.Empty;
if (usesSegments.Length != 2 ||
pathSegments.Length < 2 ||
String.IsNullOrEmpty(pathSegments[0]) ||
String.IsNullOrEmpty(pathSegments[1]) ||
String.IsNullOrEmpty(gitRef))
{
// todo: loc
context.Error(uses, $"Expected format {{org}}/{{repo}}[/path]@ref. Actual '{uses.Value}'");
}
else
{
var repositoryName = $"{pathSegments[0]}/{pathSegments[1]}";
var directoryPath = pathSegments.Length > 2 ? String.Join("/", pathSegments.Skip(2)) : String.Empty;
result.Reference = new RepositoryPathReference
{
RepositoryType = RepositoryTypes.GitHub,
Name = repositoryName,
Ref = gitRef,
Path = directoryPath,
};
}
}
return result;
}
else
{
// todo: build a "required" concept into the parser
context.Error(step, $"Either '{PipelineTemplateConstants.Uses}' or '{PipelineTemplateConstants.Run}' is required");
return null;
}
}
/// <summary>
/// Normalizes a job- or step-level "if" condition into the expression string stored on
/// the pipeline. A blank condition becomes "success()". A condition that does not call
/// any status function (always/cancelled/failure/success) is wrapped so the default
/// success check still applies: "success() &amp;&amp; (&lt;condition&gt;)".
/// </summary>
/// <param name="context">Receives parse errors; a parse failure is reported here and null is returned</param>
/// <param name="ifCondition">The raw condition text, may be null or whitespace</param>
/// <param name="isJob">True when validating a job condition (different named values/functions are legal)</param>
/// <param name="isDefaultScope">For steps, whether the step is in the workflow file rather than a template (templates additionally expose "inputs")</param>
private static String ConvertToIfCondition(
    TemplateContext context,
    StringToken ifCondition,
    Boolean isJob,
    Boolean isDefaultScope)
{
    // Missing/blank condition defaults to success()
    if (String.IsNullOrWhiteSpace(ifCondition?.Value))
    {
        return $"{PipelineTemplateConstants.Success}()";
    }

    var condition = ifCondition.Value;

    // Pick the named values and functions that are legal in this context
    var expressionParser = new ExpressionParser();
    var functions = default(IFunctionInfo[]);
    var namedValues = default(INamedValueInfo[]);
    if (isJob)
    {
        namedValues = s_jobIfNamedValues;
        functions = PhaseCondition.FunctionInfo;
    }
    else
    {
        namedValues = isDefaultScope ? s_stepNamedValues : s_stepInTemplateNamedValues;
        functions = s_stepConditionFunctions;
    }

    // Parse the condition; syntax errors are recorded on the context rather than thrown
    var node = default(ExpressionNode);
    try
    {
        node = expressionParser.CreateTree(condition, null, namedValues, functions) as ExpressionNode;
    }
    catch (Exception ex)
    {
        context.Error(ifCondition, ex);
        return null;
    }

    if (node == null)
    {
        return $"{PipelineTemplateConstants.Success}()";
    }

    // Determine whether the expression already references a status function anywhere in the tree
    var hasStatusFunction = node.Traverse().Any(x =>
    {
        if (x is Function function)
        {
            return String.Equals(function.Name, PipelineTemplateConstants.Always, StringComparison.OrdinalIgnoreCase) ||
                String.Equals(function.Name, PipelineTemplateConstants.Cancelled, StringComparison.OrdinalIgnoreCase) ||
                String.Equals(function.Name, PipelineTemplateConstants.Failure, StringComparison.OrdinalIgnoreCase) ||
                String.Equals(function.Name, PipelineTemplateConstants.Success, StringComparison.OrdinalIgnoreCase);
        }

        return false;
    });

    // Only wrap with the implicit success() check when no status function is present
    return hasStatusFunction ? condition : $"{PipelineTemplateConstants.Success}() && ({condition})";
}
// Named values legal in a job-level "if" expression
private static readonly INamedValueInfo[] s_jobIfNamedValues = new INamedValueInfo[]
{
    new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
};

// Named values legal in a step-level "if" expression when the step is defined
// directly in the workflow file (the default scope)
private static readonly INamedValueInfo[] s_stepNamedValues = new INamedValueInfo[]
{
    new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Strategy),
    new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Matrix),
    new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Steps),
    new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
    new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Job),
    new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Runner),
    new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Env),
};

// Named values legal in a step-level "if" expression when the step comes from a
// template scope; identical to s_stepNamedValues plus "inputs"
private static readonly INamedValueInfo[] s_stepInTemplateNamedValues = new INamedValueInfo[]
{
    new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Strategy),
    new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Matrix),
    new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Steps),
    new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Inputs),
    new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
    new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Job),
    new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Runner),
    new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Env),
};

// Status functions legal in a step-level "if" expression; each is a no-op here
// because the condition is only validated, not evaluated, at conversion time
private static readonly IFunctionInfo[] s_stepConditionFunctions = new IFunctionInfo[]
{
    new FunctionInfo<NoOperation>(PipelineTemplateConstants.Always, 0, 0),
    new FunctionInfo<NoOperation>(PipelineTemplateConstants.Cancelled, 0, 0),
    new FunctionInfo<NoOperation>(PipelineTemplateConstants.Failure, 0, 0),
    new FunctionInfo<NoOperation>(PipelineTemplateConstants.Success, 0, 0),
};
}
}

View File

@@ -46,170 +46,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public Int32 MaxResultSize { get; set; } = 10 * 1024 * 1024; // 10 mb
/// <summary>
/// Evaluates the "strategy" section of a job and expands it into one or more job
/// configurations. When no strategy is specified (or it expands to nothing), a single
/// default configuration is returned with a null "matrix" context and a one-job
/// "strategy" context.
/// </summary>
/// <param name="token">The unevaluated strategy token, may be null</param>
/// <param name="contextData">Context data available to expressions during evaluation</param>
/// <param name="jobFactoryDisplayName">Display-name pattern used to build per-configuration display names</param>
/// <exception cref="TemplateValidationException">Thrown (via Errors.Check) when evaluation or conversion produced errors</exception>
public StrategyResult EvaluateStrategy(
    TemplateToken token,
    DictionaryContextData contextData,
    String jobFactoryDisplayName)
{
    var result = new StrategyResult();

    if (token != null && token.Type != TokenType.Null)
    {
        var context = CreateContext(contextData);
        try
        {
            token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.Strategy, token, 0, null, omitHeader: true);
            context.Errors.Check();
            result = PipelineTemplateConverter.ConvertToStrategy(context, token, jobFactoryDisplayName);
        }
        catch (Exception ex) when (!(ex is TemplateValidationException))
        {
            // Non-validation failures are folded into the error list so Check() below throws uniformly
            context.Errors.Add(ex);
        }

        context.Errors.Check();
    }

    if (result.Configurations.Count == 0)
    {
        // No matrix/strategy produced any configurations; synthesize the single default job
        var configuration = new StrategyConfiguration
        {
            Name = PipelineConstants.DefaultJobName,
            DisplayName = new JobDisplayNameBuilder(jobFactoryDisplayName).Build(),
        };
        configuration.ContextData.Add(PipelineTemplateConstants.Matrix, null);
        configuration.ContextData.Add(
            PipelineTemplateConstants.Strategy,
            new DictionaryContextData
            {
                {
                    "fail-fast",
                    new BooleanContextData(result.FailFast)
                },
                {
                    "job-index",
                    new NumberContextData(0)
                },
                {
                    "job-total",
                    new NumberContextData(1)
                },
                {
                    "max-parallel",
                    new NumberContextData(1)
                }
            });
        result.Configurations.Add(configuration);
    }

    return result;
}
/// <summary>
/// Evaluates a job display-name token. Falls back to <paramref name="defaultDisplayName"/>
/// when the token is absent or evaluates to an empty string.
/// </summary>
public String EvaluateJobDisplayName(
    TemplateToken token,
    DictionaryContextData contextData,
    String defaultDisplayName)
{
    // Nothing to evaluate; use the caller-supplied default
    if (token == null || token.Type == TokenType.Null)
    {
        return defaultDisplayName;
    }

    var context = CreateContext(contextData);
    var displayName = default(String);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StringStrategyContext, token, 0, null, omitHeader: true);
        context.Errors.Check();
        displayName = PipelineTemplateConverter.ConvertToJobDisplayName(context, evaluated);
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        context.Errors.Add(ex);
    }

    context.Errors.Check();
    return String.IsNullOrEmpty(displayName) ? defaultDisplayName : displayName;
}
/// <summary>
/// Evaluates the "runs-on" token into a job target.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when no target could be produced</exception>
public PhaseTarget EvaluateJobTarget(
    TemplateToken token,
    DictionaryContextData contextData)
{
    // A missing token can never yield a target
    if (token == null || token.Type == TokenType.Null)
    {
        throw new InvalidOperationException("Job target cannot be null");
    }

    var context = CreateContext(contextData);
    var target = default(PhaseTarget);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.RunsOn, token, 0, null, omitHeader: true);
        context.Errors.Check();
        target = PipelineTemplateConverter.ConvertToJobTarget(context, evaluated);
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        context.Errors.Add(ex);
    }

    context.Errors.Check();
    return target ?? throw new InvalidOperationException("Job target cannot be null");
}
/// <summary>
/// Evaluates the "timeout-minutes" token for a job. Returns the system default
/// when the token is absent or conversion yields no value.
/// </summary>
public Int32 EvaluateJobTimeout(
    TemplateToken token,
    DictionaryContextData contextData)
{
    // No override specified; use the default timeout
    if (token == null || token.Type == TokenType.Null)
    {
        return PipelineConstants.DefaultJobTimeoutInMinutes;
    }

    var context = CreateContext(contextData);
    var timeout = default(Int32?);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.NumberStrategyContext, token, 0, null, omitHeader: true);
        context.Errors.Check();
        timeout = PipelineTemplateConverter.ConvertToJobTimeout(context, evaluated);
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        context.Errors.Add(ex);
    }

    context.Errors.Check();
    return timeout ?? PipelineConstants.DefaultJobTimeoutInMinutes;
}
/// <summary>
/// Evaluates the "cancel-timeout-minutes" token for a job. Returns the system
/// default when the token is absent or conversion yields no value.
/// </summary>
public Int32 EvaluateJobCancelTimeout(
    TemplateToken token,
    DictionaryContextData contextData)
{
    // No override specified; use the default cancel timeout
    if (token == null || token.Type == TokenType.Null)
    {
        return PipelineConstants.DefaultJobCancelTimeoutInMinutes;
    }

    var context = CreateContext(contextData);
    var cancelTimeout = default(Int32?);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.NumberStrategyContext, token, 0, null, omitHeader: true);
        context.Errors.Check();
        cancelTimeout = PipelineTemplateConverter.ConvertToJobCancelTimeout(context, evaluated);
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        context.Errors.Add(ex);
    }

    context.Errors.Check();
    return cancelTimeout ?? PipelineConstants.DefaultJobCancelTimeoutInMinutes;
}
public DictionaryContextData EvaluateStepScopeInputs(
TemplateToken token,
DictionaryContextData contextData)

View File

@@ -1,239 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.IO;
using System.Threading;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.ObjectTemplating.Schema;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
using GitHub.DistributedTask.ObjectTemplating;
[EditorBrowsable(EditorBrowsableState.Never)]
/// <summary>
/// Parses a YAML workflow file into a PipelineTemplate, resolving any referenced
/// step templates into inline steps plus a "scopes" record on the owning job.
/// </summary>
public sealed class PipelineTemplateParser
{
    // The workflow schema is immutable, so it is built once for all parser instances
    static PipelineTemplateParser()
    {
        var schemaFactory = new PipelineTemplateSchemaFactory();
        s_schema = schemaFactory.CreateSchema();
    }

    /// <summary>
    /// Creates a parser. The options are defensively copied.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when trace or options is null</exception>
    public PipelineTemplateParser(
        ITraceWriter trace,
        ParseOptions options)
    {
        m_trace = trace ?? throw new ArgumentNullException(nameof(trace));
        m_parseOptions = new ParseOptions(options ?? throw new ArgumentNullException(nameof(options)));
    }

    /// <summary>
    /// Loads the YAML pipeline template
    /// </summary>
    /// <exception cref="FileNotFoundException">Thrown when the entry YAML file does not exist</exception>
    public PipelineTemplate LoadPipeline(
        IFileProvider fileProvider,
        RepositoryResource self,
        String path,
        CancellationToken cancellationToken)
    {
        fileProvider = fileProvider ?? throw new ArgumentNullException(nameof(fileProvider));
        self = self ?? throw new ArgumentNullException(nameof(self));
        var parseResult = LoadPipelineInternal(fileProvider, path, cancellationToken);
        return PipelineTemplateConverter.ConvertToPipeline(parseResult.Context, self, parseResult.Value);
    }

    /// <summary>
    /// Loads and parses the entry file, then resolves step-template references.
    /// Errors are accumulated on the returned context rather than thrown from here.
    /// </summary>
    internal ParseResult LoadPipelineInternal(
        IFileProvider fileProvider,
        String path,
        CancellationToken cancellationToken)
    {
        // Setup the context
        var templateLoader = new YamlTemplateLoader(new ParseOptions(m_parseOptions), fileProvider);
        var context = new TemplateContext
        {
            CancellationToken = cancellationToken,
            Errors = new TemplateValidationErrors(m_parseOptions.MaxErrors, m_parseOptions.MaxErrorMessageLength),
            Memory = new TemplateMemory(
                maxDepth: m_parseOptions.MaxDepth,
                maxEvents: m_parseOptions.MaxParseEvents,
                maxBytes: m_parseOptions.MaxResultSize),
            Schema = s_schema,
            TraceWriter = m_trace,
        };

        // Load the entry file
        var token = default(TemplateToken);
        try
        {
            token = templateLoader.LoadFile(context, null, null, path, PipelineTemplateConstants.WorkflowRoot);
        }
        catch (Exception ex)
        {
            context.Errors.Add(ex);
        }

        var result = new ParseResult
        {
            Context = context,
            Value = token,
        };

        if (token != null && context.Errors.Count == 0)
        {
            // Inline any referenced step templates; when at least one was resolved,
            // trace the fully-resolved document for diagnostics
            var templateReferenceCount = ResolveWorkflowTemplateReferences(context, templateLoader, token);
            if (templateReferenceCount > 0 && context.Errors.Count == 0)
            {
                context.TraceWriter.Info(String.Empty);
                context.TraceWriter.Info("# ");
                context.TraceWriter.Info("# Template resolution complete. Final runtime YAML document:");
                context.TraceWriter.Info("# ");
                context.TraceWriter.Info("{0}", result.ToYaml());
            }
        }

        return result;
    }

    // Walks the workflow root and resolves template references found under the
    // "jobs" or "workflow" sections. Returns the number of references resolved.
    private Int32 ResolveWorkflowTemplateReferences(
        TemplateContext context,
        YamlTemplateLoader templateLoader,
        TemplateToken token)
    {
        var resolvedCount = 0;
        var workflow = token.AssertMapping("workflow");
        foreach (var workflowProperty in workflow)
        {
            var workflowPropertyName = workflowProperty.Key.AssertString("workflow property");
            switch (workflowPropertyName.Value)
            {
                case PipelineTemplateConstants.Jobs:
                    resolvedCount += ResolveJobsTemplateReferences(context, templateLoader, workflowProperty.Value);
                    break;
                case PipelineTemplateConstants.Workflow:
                    resolvedCount += ResolveJobsTemplateReferences(context, templateLoader, workflowProperty.Value);
                    break;
            }
        }

        return resolvedCount;
    }

    // Resolves template references within each job's "steps" section. Any scopes
    // created during resolution are attached to the job under a "scopes" property.
    private Int32 ResolveJobsTemplateReferences(
        TemplateContext context,
        YamlTemplateLoader templateLoader,
        TemplateToken token)
    {
        var resolvedCount = 0;
        var jobs = token.AssertMapping("jobs");
        foreach (var jobsProperty in jobs)
        {
            var job = jobsProperty.Value.AssertMapping("jobs property value");
            var scopes = new SequenceToken(null, null, null);
            foreach (var jobProperty in job)
            {
                var jobPropertyName = jobProperty.Key.AssertString("job property name");
                switch (jobPropertyName.Value)
                {
                    case PipelineTemplateConstants.Steps:
                        resolvedCount += ResolveStepsTemplateReferences(context, templateLoader, jobProperty.Value, scopes);
                        break;
                }
            }

            if (scopes.Count > 0)
            {
                var scopesPropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Scopes);
                job.Add(scopesPropertyName, scopes);
                context.Memory.AddBytes(scopesPropertyName);
                context.Memory.AddBytes(scopes); // Do not traverse, nested objects already accounted for
            }
        }

        return resolvedCount;
    }

    // Replaces each template-reference step with the steps from the referenced
    // template file, recording the created scope. Template memory accounting is
    // adjusted as tokens are removed/inserted. Stops early if errors occur.
    private Int32 ResolveStepsTemplateReferences(
        TemplateContext context,
        YamlTemplateLoader templateLoader,
        TemplateToken token,
        SequenceToken scopes)
    {
        var resolvedCount = 0;
        var steps = token.AssertSequence("steps");
        var stepIndex = 0;
        while (stepIndex < steps.Count && context.Errors.Count == 0)
        {
            var step = steps[stepIndex].AssertMapping("step");
            if (!TemplateReference.TryCreate(step, out var reference))
            {
                // Ordinary step; leave it in place
                stepIndex++;
                continue;
            }

            resolvedCount++;

            var template = templateLoader.LoadFile(
                context,
                reference.TemplatePath.FileId,
                reference.TemplateScope,
                reference.TemplatePath.Value,
                PipelineTemplateConstants.StepsTemplateRoot);
            if (context.Errors.Count != 0)
            {
                break;
            }

            var scope = reference.CreateScope(context, template);
            if (context.Errors.Count != 0)
            {
                break;
            }

            // Remove the template reference and memory overhead
            steps.RemoveAt(stepIndex);
            context.Memory.SubtractBytes(step, true); // Traverse

            // Remove the template memory overhead
            context.Memory.SubtractBytes(template, true); // Traverse

            var templateSteps = GetSteps(template);
            if (templateSteps?.Count > 0)
            {
                // Add the steps from the template
                steps.InsertRange(stepIndex, templateSteps);
                context.Memory.AddBytes(templateSteps, true); // Traverse
                context.Memory.SubtractBytes(templateSteps, false);

                // Add the scope
                scopes.Add(scope);
                context.Memory.AddBytes(scope, true); // Traverse
            }
        }

        return resolvedCount;
    }

    // Returns the "steps" sequence from a steps-template mapping, or null when absent
    private SequenceToken GetSteps(TemplateToken template)
    {
        var mapping = template.AssertMapping("steps template");
        foreach (var property in mapping)
        {
            var propertyName = property.Key.AssertString("steps template property name");
            switch (propertyName.Value)
            {
                case PipelineTemplateConstants.Steps:
                    return property.Value.AssertSequence("steps template steps property value");
            }
        }

        return null;
    }

    private static TemplateSchema s_schema;
    private readonly ParseOptions m_parseOptions;
    private readonly ITraceWriter m_trace;
}
}

View File

@@ -1,121 +0,0 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Text;
using GitHub.DistributedTask.Pipelines.Validation;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Builds unique, valid node reference names from display-name segments. Valid names
/// contain only [A-Za-z0-9_-], start with a letter or '_', and are shorter than
/// PipelineConstants.MaxNodeNameLength.
/// </summary>
internal sealed class ReferenceNameBuilder
{
    /// <summary>
    /// Appends a sanitized segment to the in-progress name. Illegal characters are
    /// replaced with '_'; segments after the first are joined with '_'.
    /// </summary>
    internal void AppendSegment(String value)
    {
        if (String.IsNullOrEmpty(value))
        {
            return;
        }

        if (m_name.Length == 0)
        {
            var first = value[0];
            if ((first >= 'a' && first <= 'z') ||
                (first >= 'A' && first <= 'Z') ||
                first == '_')
            {
                // Legal first char
            }
            else if ((first >= '0' && first <= '9') || first == '-')
            {
                // Illegal first char, but legal char.
                // Prepend "_".
                m_name.Append("_");
            }
            else
            {
                // Illegal char; the loop below replaces it with "_"
            }
        }
        else
        {
            // Separator between segments
            m_name.Append(c_separator);
        }

        foreach (var c in value)
        {
            if ((c >= 'a' && c <= 'z') ||
                (c >= 'A' && c <= 'Z') ||
                (c >= '0' && c <= '9') ||
                c == '_' ||
                c == '-')
            {
                // Legal
                m_name.Append(c);
            }
            else
            {
                // Illegal
                m_name.Append("_");
            }
        }
    }

    /// <summary>
    /// Produces a unique name from the accumulated segments (or "job" when none were
    /// appended), truncating and appending a numeric suffix until unused. Resets the
    /// builder for the next name.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown after 999 collision attempts</exception>
    internal String Build()
    {
        var original = m_name.Length > 0 ? m_name.ToString() : "job";
        var attempt = 1;
        var suffix = default(String);
        while (true)
        {
            if (attempt == 1)
            {
                suffix = String.Empty;
            }
            else if (attempt < 1000)
            {
                suffix = String.Format(CultureInfo.InvariantCulture, "_{0}", attempt);
            }
            else
            {
                throw new InvalidOperationException("Unable to create a unique name");
            }

            // Truncate so the candidate plus suffix fits within the max length
            var candidate = original.Substring(0, Math.Min(original.Length, PipelineConstants.MaxNodeNameLength - suffix.Length)) + suffix;
            if (m_distinctNames.Add(candidate))
            {
                m_name.Clear();
                return candidate;
            }

            attempt++;
        }
    }

    /// <summary>
    /// Registers a user-specified name. Returns false with an error message when the
    /// name is malformed, too long, or already used within the same scope.
    /// </summary>
    internal Boolean TryAddKnownName(
        String value,
        out String error)
    {
        // Bug fix: the original condition was "!IsValid && value.Length < Max", which
        // accepted names that were too long, and names that were both invalid AND too
        // long. A name is rejected when it is invalid OR not shorter than the max.
        if (!NameValidation.IsValid(value, allowHyphens: true) || value.Length >= PipelineConstants.MaxNodeNameLength)
        {
            error = $"The identifier '{value}' is invalid. IDs may only contain alphanumeric characters, '_', and '-'. IDs must start with a letter or '_' and must be less than {PipelineConstants.MaxNodeNameLength} characters.";
            return false;
        }
        else if (!m_distinctNames.Add(value))
        {
            error = $"The identifier '{value}' may not be used more than once within the same scope.";
            return false;
        }
        else
        {
            error = null;
            return true;
        }
    }

    private const String c_separator = "_";
    private readonly HashSet<String> m_distinctNames = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
    private readonly StringBuilder m_name = new StringBuilder();
}
}

View File

@@ -1,37 +0,0 @@
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Maps task results onto the status strings used by expression functions
/// (success/failure/cancelled/skipped).
/// </summary>
public static class TaskResultExtensions
{
    /// <summary>
    /// Converts the result to its status-string context data, or null when the
    /// result has no corresponding status.
    /// </summary>
    public static PipelineContextData ToContextData(this TaskResult result)
    {
        String status;
        switch (result)
        {
            case TaskResult.Succeeded:
            case TaskResult.SucceededWithIssues:
                status = PipelineTemplateConstants.Success;
                break;
            case TaskResult.Failed:
            case TaskResult.Abandoned:
                status = PipelineTemplateConstants.Failure;
                break;
            case TaskResult.Canceled:
                status = PipelineTemplateConstants.Cancelled;
                break;
            case TaskResult.Skipped:
                status = PipelineTemplateConstants.Skipped;
                break;
            default:
                return null;
        }

        return new StringContextData(status);
    }

    /// <summary>
    /// Nullable overload; returns null when no result is present.
    /// </summary>
    public static PipelineContextData ToContextData(this TaskResult? result)
    {
        return result.HasValue ? result.Value.ToContextData() : null;
    }
}
}

View File

@@ -1,197 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.IO;
using System.Threading;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.ObjectTemplating.Schema;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
using GitHub.DistributedTask.ObjectTemplating;
/// <summary>
/// A reference from a step to a steps-template file ("template:" step). Carries the
/// target path, optional id/scope, and input overrides, and can produce the scope
/// mapping (name, merged inputs, outputs) recorded on the owning job.
/// </summary>
internal sealed class TemplateReference
{
    // A generated id (assigned by the system) takes precedence over a user id;
    // generated ids additionally suppress output publication (see CreateScope)
    private TemplateReference(
        String scope,
        String id,
        String generatedId,
        StringToken templatePath,
        MappingToken inputs)
    {
        Scope = scope;
        TemplatePath = templatePath;
        Inputs = inputs;
        if (!String.IsNullOrEmpty(generatedId))
        {
            Id = generatedId;
            m_isGeneratedId = true;
        }
        else
        {
            Id = id;
        }
    }

    // The reference's id (user-specified or generated)
    internal String Id { get; }

    // Input overrides specified on the reference, may be null
    internal MappingToken Inputs { get; }

    // The scope the referencing step lives in, may be null/empty for the default scope
    internal String Scope { get; }

    // Path of the referenced template file
    internal StringToken TemplatePath { get; }

    // Fully-qualified scope name for steps brought in by this reference: "<scope>.<id>" or "<id>"
    internal String TemplateScope
    {
        get
        {
            return !String.IsNullOrEmpty(Scope) ? $"{Scope}.{Id}" : Id;
        }
    }

    /// <summary>
    /// Builds the scope mapping for this reference: the scope name, the reference's
    /// input overrides merged with the template's input defaults, and (for
    /// user-identified references only) the template's outputs. Unknown override
    /// input names are reported as errors on the context.
    /// </summary>
    internal MappingToken CreateScope(
        TemplateContext context,
        TemplateToken template)
    {
        var mapping = template.AssertMapping("template file");

        // Get the inputs and outputs from the template
        var inputs = default(MappingToken);
        var outputs = default(MappingToken);
        foreach (var pair in mapping)
        {
            var propertyName = pair.Key.AssertString("template file property name");
            switch (propertyName.Value)
            {
                case PipelineTemplateConstants.Inputs:
                    inputs = pair.Value.AssertMapping("template file inputs");
                    break;
                case PipelineTemplateConstants.Outputs:
                    // Outputs are only exposed when the reference has a user-specified id
                    if (!m_isGeneratedId)
                    {
                        outputs = pair.Value.AssertMapping("template file outputs");
                    }
                    break;
            }
        }

        // Determine allowed input names
        var allowedInputNames = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
        if (inputs?.Count > 0)
        {
            foreach (var pair in inputs)
            {
                var inputPropertyName = pair.Key.AssertString("template file inputs property");
                allowedInputNames.Add(inputPropertyName.Value);
            }
        }

        // Validate override inputs names
        var overrideInputs = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
        var mergedInputs = new MappingToken(null, null, null);
        if (Inputs?.Count > 0)
        {
            foreach (var pair in Inputs)
            {
                var inputPropertyName = pair.Key.AssertString("template reference inputs property");
                if (!allowedInputNames.Contains(inputPropertyName.Value))
                {
                    context.Error(inputPropertyName, $"Input '{inputPropertyName.Value}' is not allowed");
                    continue;
                }

                overrideInputs.Add(inputPropertyName.Value);
                mergedInputs.Add(pair.Key, pair.Value);
            }
        }

        // Merge defaults (template-declared inputs not overridden by the reference)
        if (inputs?.Count > 0)
        {
            foreach (var pair in inputs)
            {
                var inputPropertyName = pair.Key.AssertString("template file inputs property");
                if (!overrideInputs.Contains(inputPropertyName.Value))
                {
                    mergedInputs.Add(pair.Key, pair.Value);
                }
            }
        }

        // Build the scope object
        var result = new MappingToken(null, null, null);
        var namePropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Name);
        var namePropertyValue = new StringToken(null, null, null, TemplateScope);
        result.Add(namePropertyName, namePropertyValue);
        if (mergedInputs.Count > 0)
        {
            var inputsPropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Inputs);
            result.Add(inputsPropertyName, mergedInputs);
        }
        if (outputs?.Count > 0)
        {
            var outputsPropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Outputs);
            result.Add(outputsPropertyName, outputs);
        }

        return result;
    }

    /// <summary>
    /// Attempts to interpret a step mapping as a template reference. Returns true
    /// (with the reference) only when a "template" property is present.
    /// </summary>
    internal static Boolean TryCreate(
        MappingToken mapping,
        out TemplateReference reference)
    {
        var scope = default(String);
        var id = default(String);
        var generatedId = default(String);
        var templatePath = default(StringToken);
        var inputs = default(MappingToken);
        foreach (var property in mapping)
        {
            var propertyName = property.Key.AssertString("candidate template reference property name");
            switch (propertyName.Value)
            {
                case PipelineTemplateConstants.Scope:
                    var scopeStringToken = property.Value.AssertString("step scope");
                    scope = scopeStringToken.Value;
                    break;
                case PipelineTemplateConstants.Id:
                    var idStringToken = property.Value.AssertString("step id");
                    id = idStringToken.Value;
                    break;
                case PipelineTemplateConstants.GeneratedId:
                    var generatedIdStringToken = property.Value.AssertString("step generated id");
                    generatedId = generatedIdStringToken.Value;
                    break;
                case PipelineTemplateConstants.Template:
                    templatePath = property.Value.AssertString("step template reference");
                    break;
                case PipelineTemplateConstants.Inputs:
                    inputs = property.Value.AssertMapping("step template reference inputs");
                    break;
            }
        }

        if (templatePath != null)
        {
            reference = new TemplateReference(scope, id, generatedId, templatePath, inputs);
            return true;
        }
        else
        {
            reference = null;
            return false;
        }
    }

    // True when Id came from the system-generated id rather than the user
    private Boolean m_isGeneratedId;
}
}

View File

@@ -1,572 +0,0 @@
using System;
using System.Globalization;
using System.IO;
using System.Linq;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using YamlDotNet.Core;
using YamlDotNet.Core.Events;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Converts a YAML file into a TemplateToken
/// </summary>
internal sealed class YamlObjectReader : IObjectReader
{
/// <summary>
/// Creates a reader over a YAML document.
/// </summary>
/// <param name="fileId">Optional file ID stamped onto every produced token (for error reporting)</param>
/// <param name="input">The YAML text to parse</param>
internal YamlObjectReader(
    Int32? fileId,
    TextReader input)
{
    m_fileId = fileId;
    m_parser = new Parser(input);
}
/// <summary>
/// Attempts to consume the current parse event as a literal (scalar) token.
/// Explicitly tagged scalars are converted per their tag; untagged plain scalars
/// are typed per the YAML 1.2 "core" schema; all other scalars become strings.
/// Returns false without consuming when the current event is not a scalar.
/// </summary>
public Boolean AllowLiteral(out LiteralToken value)
{
    if (EvaluateCurrent() is Scalar scalar)
    {
        // Tag specified
        if (!String.IsNullOrEmpty(scalar.Tag))
        {
            // String tag
            if (String.Equals(scalar.Tag, c_stringTag, StringComparison.Ordinal))
            {
                value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
                MoveNext();
                return true;
            }

            // Not plain style
            if (scalar.Style != ScalarStyle.Plain)
            {
                throw new NotSupportedException($"The scalar style '{scalar.Style}' on line {scalar.Start.Line} and column {scalar.Start.Column} is not valid with the tag '{scalar.Tag}'");
            }

            // Boolean, Float, Integer, or Null
            switch (scalar.Tag)
            {
                case c_booleanTag:
                    value = ParseBoolean(scalar);
                    break;
                case c_floatTag:
                    value = ParseFloat(scalar);
                    break;
                case c_integerTag:
                    value = ParseInteger(scalar);
                    break;
                case c_nullTag:
                    value = ParseNull(scalar);
                    break;
                default:
                    throw new NotSupportedException($"Unexpected tag '{scalar.Tag}'");
            }

            MoveNext();
            return true;
        }

        // Plain style, determine type using YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
        if (scalar.Style == ScalarStyle.Plain)
        {
            if (MatchNull(scalar, out var nullToken))
            {
                value = nullToken;
            }
            else if (MatchBoolean(scalar, out var booleanToken))
            {
                value = booleanToken;
            }
            else if (MatchInteger(scalar, out var numberToken) ||
                MatchFloat(scalar, out numberToken))
            {
                value = numberToken;
            }
            else
            {
                value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
            }

            MoveNext();
            return true;
        }

        // Otherwise assume string
        value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
        MoveNext();
        return true;
    }

    value = default;
    return false;
}
/// <summary>
/// Attempts to consume the current parse event as a sequence start.
/// Returns false without consuming when the event is anything else.
/// </summary>
public Boolean AllowSequenceStart(out SequenceToken value)
{
    var start = EvaluateCurrent() as SequenceStart;
    if (start == null)
    {
        value = default;
        return false;
    }

    value = new SequenceToken(m_fileId, start.Start.Line, start.Start.Column);
    MoveNext();
    return true;
}
/// <summary>
/// Attempts to consume the current parse event as a sequence end.
/// </summary>
public Boolean AllowSequenceEnd()
{
    if (!(EvaluateCurrent() is SequenceEnd))
    {
        return false;
    }

    MoveNext();
    return true;
}
/// <summary>
/// Attempts to consume the current parse event as a mapping start.
/// Returns false without consuming when the event is anything else.
/// </summary>
public Boolean AllowMappingStart(out MappingToken value)
{
    var start = EvaluateCurrent() as MappingStart;
    if (start == null)
    {
        value = default;
        return false;
    }

    value = new MappingToken(m_fileId, start.Start.Line, start.Start.Column);
    MoveNext();
    return true;
}
/// <summary>
/// Attempts to consume the current parse event as a mapping end.
/// </summary>
public Boolean AllowMappingEnd()
{
    if (!(EvaluateCurrent() is MappingEnd))
    {
        return false;
    }

    MoveNext();
    return true;
}
/// <summary>
/// Consumes the trailing parse events, which must be DocumentEnd followed by
/// StreamEnd, and verifies no further events remain.
/// </summary>
public void ValidateEnd()
{
    if (!(EvaluateCurrent() is DocumentEnd))
    {
        throw new InvalidOperationException("Expected document end parse event");
    }

    MoveNext();

    if (!(EvaluateCurrent() is StreamEnd))
    {
        throw new InvalidOperationException("Expected stream end parse event");
    }

    MoveNext();

    if (MoveNext())
    {
        throw new InvalidOperationException("Expected end of parse events");
    }
}
/// <summary>
/// Consumes the leading parse events, which must be StreamStart followed by
/// DocumentStart, leaving the parser positioned at the document's first content.
/// </summary>
public void ValidateStart()
{
    if (EvaluateCurrent() != null)
    {
        throw new InvalidOperationException("Unexpected parser state");
    }

    if (!MoveNext())
    {
        throw new InvalidOperationException("Expected a parse event");
    }

    if (!(EvaluateCurrent() is StreamStart))
    {
        throw new InvalidOperationException("Expected stream start parse event");
    }

    MoveNext();

    if (!(EvaluateCurrent() is DocumentStart))
    {
        throw new InvalidOperationException("Expected document start parse event");
    }

    MoveNext();
}
/// <summary>
/// Returns the cached current parse event, loading and validating it from the
/// underlying parser on first access. Rejects anchors and any event type other
/// than the scalar/sequence/mapping/document/stream events this reader handles.
/// </summary>
private ParsingEvent EvaluateCurrent()
{
    if (m_current == null)
    {
        m_current = m_parser.Current;
        if (m_current != null)
        {
            if (m_current is Scalar scalar)
            {
                // Verify not using anchors
                if (scalar.Anchor != null)
                {
                    throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{scalar.Anchor}'");
                }
            }
            else if (m_current is MappingStart mappingStart)
            {
                // Verify not using anchors
                if (mappingStart.Anchor != null)
                {
                    throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{mappingStart.Anchor}'");
                }
            }
            else if (m_current is SequenceStart sequenceStart)
            {
                // Verify not using anchors
                if (sequenceStart.Anchor != null)
                {
                    throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{sequenceStart.Anchor}'");
                }
            }
            else if (!(m_current is MappingEnd) &&
                !(m_current is SequenceEnd) &&
                !(m_current is DocumentStart) &&
                !(m_current is DocumentEnd) &&
                !(m_current is StreamStart) &&
                !(m_current is StreamEnd))
            {
                throw new InvalidOperationException($"Unexpected parsing event type: {m_current.GetType().Name}");
            }
        }
    }

    return m_current;
}
/// <summary>
/// Advances the underlying parser and clears the cached current event.
/// Returns false when no more events are available.
/// </summary>
private Boolean MoveNext()
{
    m_current = null;
    return m_parser.MoveNext();
}
/// <summary>
/// Converts a scalar explicitly tagged as boolean; throws when the value does
/// not match a core-schema boolean form.
/// </summary>
private BooleanToken ParseBoolean(Scalar scalar)
{
    if (!MatchBoolean(scalar, out var result))
    {
        ThrowInvalidValue(scalar, c_booleanTag); // throws
    }

    return result;
}
/// <summary>
/// Converts a scalar explicitly tagged as float; throws when the value does
/// not match a core-schema float form.
/// </summary>
private NumberToken ParseFloat(Scalar scalar)
{
    if (!MatchFloat(scalar, out var result))
    {
        ThrowInvalidValue(scalar, c_floatTag); // throws
    }

    return result;
}
/// <summary>
/// Converts a scalar explicitly tagged as integer; throws when the value does
/// not match a core-schema integer form.
/// </summary>
private NumberToken ParseInteger(Scalar scalar)
{
    if (!MatchInteger(scalar, out var result))
    {
        ThrowInvalidValue(scalar, c_integerTag); // throws
    }

    return result;
}
/// <summary>
/// Converts a scalar explicitly tagged as null; throws when the value does
/// not match a core-schema null form.
/// </summary>
private NullToken ParseNull(Scalar scalar)
{
    if (!MatchNull(scalar, out var result))
    {
        ThrowInvalidValue(scalar, c_nullTag); // throws
    }

    return result;
}
/// <summary>
/// Tests whether the scalar matches a YAML 1.2 "core" schema boolean
/// (https://yaml.org/spec/1.2/spec.html#id2804923): exactly true/True/TRUE
/// or false/False/FALSE.
/// </summary>
private Boolean MatchBoolean(
    Scalar scalar,
    out BooleanToken value)
{
    var text = scalar.Value ?? String.Empty;

    if (text == "true" || text == "True" || text == "TRUE")
    {
        value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, true);
        return true;
    }

    if (text == "false" || text == "False" || text == "FALSE")
    {
        value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, false);
        return true;
    }

    value = default;
    return false;
}
/// <summary>
/// Tests whether the scalar matches a YAML 1.2 "core" schema float: a signed
/// infinity/NaN literal, or [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?.
/// Throws (via ThrowInvalidValue) when the text matches the float grammar but
/// exceeds the range of System.Double.
/// </summary>
private Boolean MatchFloat(
    Scalar scalar,
    out NumberToken value)
{
    // YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
    var str = scalar.Value;
    if (!String.IsNullOrEmpty(str))
    {
        // Check for [-+]?(\.inf|\.Inf|\.INF)|\.nan|\.NaN|\.NAN
        switch (str)
        {
            case ".inf":
            case ".Inf":
            case ".INF":
            case "+.inf":
            case "+.Inf":
            case "+.INF":
                value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.PositiveInfinity);
                return true;
            case "-.inf":
            case "-.Inf":
            case "-.INF":
                value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NegativeInfinity);
                return true;
            case ".nan":
            case ".NaN":
            case ".NAN":
                value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NaN);
                return true;
        }

        // Otherwise check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?
        // The grammar is scanned by hand before calling Double.TryParse so only
        // exact core-schema floats are accepted (TryParse alone is more lenient).

        // Skip leading sign
        var index = str[0] == '-' || str[0] == '+' ? 1 : 0;

        // Check for integer portion
        var length = str.Length;
        var hasInteger = false;
        while (index < length && str[index] >= '0' && str[index] <= '9')
        {
            hasInteger = true;
            index++;
        }

        // Check for decimal point
        var hasDot = false;
        if (index < length && str[index] == '.')
        {
            hasDot = true;
            index++;
        }

        // Check for decimal portion
        var hasDecimal = false;
        while (index < length && str[index] >= '0' && str[index] <= '9')
        {
            hasDecimal = true;
            index++;
        }

        // Check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)
        if ((hasDot && hasDecimal) || hasInteger)
        {
            // Check for end
            if (index == length)
            {
                // Try parse
                if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var doubleValue))
                {
                    value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
                    return true;
                }
                // Otherwise exceeds range
                else
                {
                    ThrowInvalidValue(scalar, c_floatTag); // throws
                }
            }
            // Check [eE][-+]?[0-9]
            else if (index < length && (str[index] == 'e' || str[index] == 'E'))
            {
                index++;

                // Skip sign
                if (index < length && (str[index] == '-' || str[index] == '+'))
                {
                    index++;
                }

                // Check for exponent
                var hasExponent = false;
                while (index < length && str[index] >= '0' && str[index] <= '9')
                {
                    hasExponent = true;
                    index++;
                }

                // Check for end
                if (hasExponent && index == length)
                {
                    // Try parse
                    if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out var doubleValue))
                    {
                        value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, (Double)doubleValue);
                        return true;
                    }
                    // Otherwise exceeds range
                    else
                    {
                        ThrowInvalidValue(scalar, c_floatTag); // throws
                    }
                }
            }
        }
    }

    value = default;
    return false;
}
/// <summary>
/// Attempts to interpret the scalar as an integer per the YAML 1.2 "core" schema:
/// [0-9]+, [-+][0-9]+, 0x[0-9a-fA-F]+, or 0o[0-7]+. Returns false for
/// non-matching strings; throws (via ThrowInvalidValue) when the string matches
/// but exceeds the numeric range. Note: decimal forms are parsed as Double
/// (NumberToken stores a Double) while hex/octal forms are limited to Int32 range.
/// </summary>
private Boolean MatchInteger(
Scalar scalar,
out NumberToken value)
{
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
var str = scalar.Value;
if (!String.IsNullOrEmpty(str))
{
// Check for [0-9]+
var firstChar = str[0];
if (firstChar >= '0' && firstChar <= '9' &&
str.Skip(1).All(x => x >= '0' && x <= '9'))
{
// Try parse
if (Double.TryParse(str, NumberStyles.None, CultureInfo.InvariantCulture, out var doubleValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
return true;
}
// Otherwise exceeds range
ThrowInvalidValue(scalar, c_integerTag); // throws
}
// Check for (-|+)[0-9]+
else if ((firstChar == '-' || firstChar == '+') &&
str.Length > 1 &&
str.Skip(1).All(x => x >= '0' && x <= '9'))
{
// Try parse
if (Double.TryParse(str, NumberStyles.AllowLeadingSign, CultureInfo.InvariantCulture, out var doubleValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
return true;
}
// Otherwise exceeds range
ThrowInvalidValue(scalar, c_integerTag); // throws
}
// Check for 0x[0-9a-fA-F]+ (length > 2 so a bare "0x" does not match)
else if (firstChar == '0' &&
str.Length > 2 &&
str[1] == 'x' &&
str.Skip(2).All(x => (x >= '0' && x <= '9') || (x >= 'a' && x <= 'f') || (x >= 'A' && x <= 'F')))
{
// Try parse the digits after the "0x" prefix
if (Int32.TryParse(str.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out var integerValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
return true;
}
// Otherwise exceeds range
ThrowInvalidValue(scalar, c_integerTag); // throws
}
// Check for 0o[0-7]+ (length > 2 so a bare "0o" does not match)
else if (firstChar == '0' &&
str.Length > 2 &&
str[1] == 'o' &&
str.Skip(2).All(x => x >= '0' && x <= '7'))
{
// Try parse; Convert.ToInt32 is used because Int32.TryParse has no octal support
var integerValue = default(Int32);
try
{
integerValue = Convert.ToInt32(str.Substring(2), 8);
}
// Otherwise exceeds range
catch (Exception)
{
ThrowInvalidValue(scalar, c_integerTag); // throws
}
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
return true;
}
}
// Not a core-schema integer
value = default;
return false;
}
/// <summary>
/// Attempts to interpret the scalar as null per the YAML 1.2 "core" schema.
/// https://yaml.org/spec/1.2/spec.html#id2804923
/// Matches the empty string, null/Null/NULL, and "~".
/// </summary>
private Boolean MatchNull(
    Scalar scalar,
    out NullToken value)
{
    var literal = scalar.Value ?? String.Empty;
    if (literal.Length == 0 ||
        literal == "null" ||
        literal == "Null" ||
        literal == "NULL" ||
        literal == "~")
    {
        value = new NullToken(m_fileId, scalar.Start.Line, scalar.Start.Column);
        return true;
    }
    // Not a core-schema null
    value = default;
    return false;
}
/// <summary>
/// Throws for a scalar whose value does not conform to the expected schema type.
/// </summary>
/// <param name="scalar">The offending scalar; supplies the value and position for the message</param>
/// <param name="tag">The expected YAML core-schema tag (e.g. tag:yaml.org,2002:int)</param>
private void ThrowInvalidValue(
    Scalar scalar,
    String tag)
{
    // Bug fix: report the expected tag passed by the caller. Previously the
    // message interpolated scalar.Tag (the scalar's own tag), leaving the
    // 'tag' parameter unused and producing an unhelpful message for plain scalars.
    throw new NotSupportedException($"The value '{scalar.Value}' on line {scalar.Start.Line} and column {scalar.Start.Column} is invalid for the type '{tag}'");
}
// YAML 1.2 core-schema tags, used to identify the expected type in error messages
private const String c_booleanTag = "tag:yaml.org,2002:bool";
private const String c_floatTag = "tag:yaml.org,2002:float";
private const String c_integerTag = "tag:yaml.org,2002:int";
private const String c_nullTag = "tag:yaml.org,2002:null";
private const String c_stringTag = "tag:yaml.org,2002:string";
// File ID recorded on every produced token (null when no file is associated)
private readonly Int32? m_fileId;
// Underlying YamlDotNet parser supplying the event stream
private readonly Parser m_parser;
// Most recently consumed parsing event
private ParsingEvent m_current;
}
}

View File

@@ -1,73 +0,0 @@
using System;
using System.Globalization;
using System.IO;
using GitHub.DistributedTask.ObjectTemplating;
using YamlDotNet.Core.Events;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Converts a TemplateToken into YAML by forwarding each write to a YamlDotNet emitter.
/// </summary>
internal sealed class YamlObjectWriter : IObjectWriter
{
    internal YamlObjectWriter(StringWriter writer)
    {
        m_emitter = new YamlDotNet.Core.Emitter(writer);
    }

    // Null strings are emitted as the empty scalar
    public void WriteString(String value) => m_emitter.Emit(new Scalar(value ?? String.Empty));

    public void WriteBoolean(Boolean value) => m_emitter.Emit(new Scalar(value ? "true" : "false"));

    // "G15" round-trips doubles without exposing binary noise digits
    public void WriteNumber(Double value) => m_emitter.Emit(new Scalar(value.ToString("G15", CultureInfo.InvariantCulture)));

    public void WriteNull() => m_emitter.Emit(new Scalar("null"));

    public void WriteSequenceStart() => m_emitter.Emit(new SequenceStart(null, null, true, SequenceStyle.Block));

    public void WriteSequenceEnd() => m_emitter.Emit(new SequenceEnd());

    public void WriteMappingStart() => m_emitter.Emit(new MappingStart());

    public void WriteMappingEnd() => m_emitter.Emit(new MappingEnd());

    // Must be called once before any value is written
    public void WriteStart()
    {
        m_emitter.Emit(new StreamStart());
        m_emitter.Emit(new DocumentStart());
    }

    // Must be called once after the last value is written
    public void WriteEnd()
    {
        m_emitter.Emit(new DocumentEnd(isImplicit: true));
        m_emitter.Emit(new StreamEnd());
    }

    private readonly YamlDotNet.Core.IEmitter m_emitter;
}
}

View File

@@ -1,251 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Loads a YAML file, and returns the parsed TemplateToken. Enforces the
/// configured max-file-count and max-file-size limits, caches file content,
/// and validates/generates step reference names in loaded templates.
/// </summary>
internal sealed class YamlTemplateLoader
{
public YamlTemplateLoader(
ParseOptions parseOptions,
IFileProvider fileProvider)
{
// Copy the options so later caller mutations cannot affect this loader
m_parseOptions = new ParseOptions(parseOptions);
m_fileProvider = fileProvider ?? throw new ArgumentNullException(nameof(fileProvider));
}
/// <summary>
/// Loads, parses, and validates the YAML file at <paramref name="path" />.
/// </summary>
/// <param name="context">Template context; parse/validation errors accumulate in context.Errors</param>
/// <param name="rootFileId">File ID of the root file; not read in this method — TODO confirm callers rely on the parameter</param>
/// <param name="scope">Optional scope name stamped onto steps that reference templates</param>
/// <param name="path">File path; resolved to a rooted path via the file provider</param>
/// <param name="templateType">Expected schema root (workflow root or steps-template root)</param>
/// <returns>The parsed template token</returns>
public TemplateToken LoadFile(
TemplateContext context,
Int32? rootFileId,
String scope,
String path,
String templateType)
{
// Loading with pre-existing errors would make it impossible to tell
// whether validation below can be trusted for this file
if (context.Errors.Count > 0)
{
throw new InvalidOperationException("Expected error count to be 0 when attempting to load a new file");
}
// Is entry file? (first LoadFile call on this loader instance)
var isEntryFile = m_referencedFiles.Count == 0;
// Root the path
path = m_fileProvider.ResolvePath(null, path);
// Validate max files; m_referencedFiles is a set, so re-loading the same
// file does not count against the limit
m_referencedFiles.Add(path);
if (m_parseOptions.MaxFiles > 0 && m_referencedFiles.Count > m_parseOptions.MaxFiles)
{
throw new InvalidOperationException($"The maximum file count of {m_parseOptions.MaxFiles} has been exceeded");
}
// Get the file ID
var fileId = context.GetFileId(path);
// Check the content cache (case-insensitive on path)
if (!m_cache.TryGetValue(path, out String fileContent))
{
// Fetch the file
context.CancellationToken.ThrowIfCancellationRequested();
fileContent = m_fileProvider.GetFileContent(path);
// Validate max file size (measured in characters, not bytes)
if (fileContent.Length > m_parseOptions.MaxFileSize)
{
throw new InvalidOperationException($"The maximum file size of {m_parseOptions.MaxFileSize} characters has been exceeded");
}
// Cache
m_cache[path] = fileContent;
}
// Deserialize YAML into a TemplateToken against the requested schema
var token = default(TemplateToken);
using (var stringReader = new StringReader(fileContent))
{
var yamlObjectReader = new YamlObjectReader(fileId, stringReader);
token = TemplateReader.Read(context, templateType, yamlObjectReader, fileId, out _);
}
// Trace a banner identifying the file (blank separator between files)
if (!isEntryFile)
{
context.TraceWriter.Info(String.Empty);
}
context.TraceWriter.Info("# ");
context.TraceWriter.Info("# {0}", path);
context.TraceWriter.Info("# ");
// Validate ref names, only when deserialization produced no errors
if (context.Errors.Count == 0)
{
switch (templateType)
{
case PipelineTemplateConstants.WorkflowRoot:
ValidateWorkflow(context, scope, token);
break;
case PipelineTemplateConstants.StepsTemplateRoot:
// A steps template is a mapping; only its "steps" property is validated
var stepsTemplate = token.AssertMapping("steps template");
foreach (var stepsTemplateProperty in stepsTemplate)
{
var stepsTemplatePropertyName = stepsTemplateProperty.Key.AssertString("steps template property name");
switch (stepsTemplatePropertyName.Value)
{
case PipelineTemplateConstants.Steps:
ValidateSteps(context, scope, stepsTemplateProperty.Value);
break;
}
}
break;
default:
throw new NotImplementedException($"Unexpected template type '{templateType}' when loading yaml file");
}
}
return token;
}
/// <summary>
/// Walks a workflow mapping and validates the steps of every job under the
/// "jobs"/"workflow" properties. Errors are recorded on the context.
/// </summary>
private void ValidateWorkflow(
TemplateContext context,
String scope,
TemplateToken token)
{
var workflow = token.AssertMapping("workflow");
foreach (var workflowProperty in workflow)
{
var workflowPropertyName = workflowProperty.Key.AssertString("workflow property name");
switch (workflowPropertyName.Value)
{
case PipelineTemplateConstants.Jobs:
case PipelineTemplateConstants.Workflow:
var jobs = workflowProperty.Value.AssertMapping("workflow property value");
foreach (var jobsProperty in jobs)
{
var job = jobsProperty.Value.AssertMapping("jobs property value");
foreach (var jobProperty in job)
{
var jobPropertyName = jobProperty.Key.AssertString("job property name");
switch (jobPropertyName.Value)
{
case PipelineTemplateConstants.Steps:
ValidateSteps(context, scope, jobProperty.Value);
break;
}
}
}
break;
}
}
}
/// <summary>
/// Validates step "id" values for uniqueness/legality, stamps the current scope
/// onto each step, and generates reference names for template-reference steps
/// that did not declare one. Mutates the step mappings in place and tracks the
/// added token memory on the context.
/// </summary>
private void ValidateSteps(
TemplateContext context,
String scope,
TemplateToken token)
{
var nameBuilder = new ReferenceNameBuilder();
var steps = token.AssertSequence("steps");
// Template-reference steps lacking an explicit ID; named in a second pass
// so generated names cannot collide with explicit ones seen later
var needsReferenceName = new List<MappingToken>();
foreach (var stepsItem in steps)
{
var step = stepsItem.AssertMapping("steps item");
var isTemplateReference = false;
var hasReferenceName = false;
foreach (var stepProperty in step)
{
var stepPropertyKey = stepProperty.Key.AssertString("step property name");
switch (stepPropertyKey.Value)
{
// Validate reference names
case PipelineTemplateConstants.Id:
var referenceNameLiteral = stepProperty.Value.AssertString("step ID");
var referenceName = referenceNameLiteral.Value;
if (String.IsNullOrEmpty(referenceName))
{
// Empty IDs are ignored rather than flagged
continue;
}
if (!nameBuilder.TryAddKnownName(referenceName, out var error))
{
context.Error(referenceNameLiteral, error);
}
hasReferenceName = true;
break;
case PipelineTemplateConstants.Template:
isTemplateReference = true;
break;
}
}
// No reference name declared on a template reference; queue for generation
if (isTemplateReference && !hasReferenceName)
{
needsReferenceName.Add(step);
}
// Stamp the scope onto the step and account for the added memory
if (!String.IsNullOrEmpty(scope))
{
var scopePropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Scope);
var scopePropertyValue = new StringToken(null, null, null, scope);
step.Add(scopePropertyName, scopePropertyValue);
context.Memory.AddBytes(scopePropertyName);
context.Memory.AddBytes(scopePropertyValue);
}
}
// Generate reference names, but only when validation found no errors
if (needsReferenceName.Count > 0 && context.Errors.Count == 0)
{
foreach (var step in needsReferenceName)
{
// Get the template path to derive a name from
var templatePath = default(String);
foreach (var stepProperty in step)
{
var stepPropertyKey = stepProperty.Key.AssertString("step property name");
switch (stepPropertyKey.Value)
{
case PipelineTemplateConstants.Template:
var templateStringToken = stepProperty.Value.AssertString("step template path");
templatePath = templateStringToken.Value;
break;
}
}
// Generate reference name and stamp it as a "generated ID" property
if (!String.IsNullOrEmpty(templatePath))
{
nameBuilder.AppendSegment(templatePath);
var generatedIdPropertyName = new StringToken(null, null, null, PipelineTemplateConstants.GeneratedId);
var generatedIdPropertyValue = new StringToken(null, null, null, nameBuilder.Build());
step.Add(generatedIdPropertyName, generatedIdPropertyValue);
context.Memory.AddBytes(generatedIdPropertyName);
context.Memory.AddBytes(generatedIdPropertyValue);
}
}
}
}
/// <summary>
/// Cache of file content, keyed by rooted path (case-insensitive)
/// </summary>
private readonly Dictionary<String, String> m_cache = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
private readonly IFileProvider m_fileProvider;
private readonly ParseOptions m_parseOptions;
/// <summary>
/// Tracks unique file references, used to enforce ParseOptions.MaxFiles
/// </summary>
private readonly HashSet<String> m_referencedFiles = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
}
}