GitHub Actions Runner

This commit is contained in:
Tingluo Huang
2019-10-10 00:52:42 -04:00
commit c8afc84840
1255 changed files with 198670 additions and 0 deletions

View File

@@ -0,0 +1,24 @@
using System;
using System.ComponentModel;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.Pipelines.Runtime;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Named-value node used when evaluating graph-node conditions
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
internal sealed class GraphConditionNamedValue<TInstance> : NamedValue where TInstance : IGraphNodeInstance
{
    /// <summary>
    /// Resolves this named value from the graph execution context's data bag.
    /// Returns null when the name is not present (TryGetValue leaves the out
    /// variable at its default).
    /// </summary>
    protected override Object EvaluateCore(
        EvaluationContext context,
        out ResultMemory resultMemory)
    {
        resultMemory = null;
        // NOTE(review): assumes context.State is always a GraphExecutionContext<TInstance>;
        // a different state type would surface as a NullReferenceException here.
        var executionContext = context.State as GraphExecutionContext<TInstance>;
        executionContext.Data.TryGetValue(Name, out var value);
        return value;
    }
}
}

View File

@@ -0,0 +1,13 @@
using System;
using System.ComponentModel;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Supplies file content and path resolution to template-loading code,
/// letting the host control how referenced files are located and read.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public interface IFileProvider
{
/// <summary>
/// Returns the content of the file identified by <paramref name="path"/>.
/// </summary>
String GetFileContent(String path);
/// <summary>
/// Resolves <paramref name="path"/> against <paramref name="defaultRoot"/> and
/// returns the resulting path.
/// </summary>
String ResolvePath(String defaultRoot, String path);
}
}

View File

@@ -0,0 +1,59 @@
using System;
using System.Collections.Generic;
using System.Globalization;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Builds job display names of the form "factory name (segment1, segment2)",
/// truncated to at most 100 characters. When the factory display name is
/// null/empty the builder is inert: segments are ignored and Build() returns null.
/// The builder is reusable — Build() clears the accumulated segments.
/// </summary>
internal sealed class JobDisplayNameBuilder
{
    public JobDisplayNameBuilder(String jobFactoryDisplayName)
    {
        // Only allocate segment storage when there is a base name to decorate.
        if (!String.IsNullOrEmpty(jobFactoryDisplayName))
        {
            m_displayName = jobFactoryDisplayName;
            m_segments = new List<String>();
        }
    }

    /// <summary>
    /// Records one segment for the parenthesized suffix. No-op for empty
    /// values or when the builder is inert.
    /// </summary>
    public void AppendSegment(String value)
    {
        if (m_segments == null || String.IsNullOrEmpty(value))
        {
            return;
        }

        m_segments.Add(value);
    }

    /// <summary>
    /// Produces the display name and resets the segment list for reuse.
    /// Returns null when the builder is inert.
    /// </summary>
    public String Build()
    {
        if (String.IsNullOrEmpty(m_displayName))
        {
            return null;
        }

        String result;
        if (m_segments.Count > 0)
        {
            var suffix = String.Join(", ", m_segments);
            result = String.Format(CultureInfo.InvariantCulture, "{0} ({1})", m_displayName, suffix);
        }
        else
        {
            result = m_displayName;
        }

        // Truncate to 100 characters, reserving room for the ellipsis.
        const Int32 maxDisplayNameLength = 100;
        if (result.Length > maxDisplayNameLength)
        {
            result = result.Substring(0, maxDisplayNameLength - 3) + "...";
        }

        m_segments.Clear();
        return result;
    }

    private readonly String m_displayName;
    private readonly List<String> m_segments;
}
}

View File

@@ -0,0 +1,234 @@
using System;
using System.Collections.Generic;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using Newtonsoft.Json.Linq;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Adapts a JSON document to the IObjectReader pull interface used by the
/// object-templating engine. The whole input is parsed up front and replayed as
/// a flat stream of parse events (DocumentStart ... DocumentEnd).
/// </summary>
internal sealed class JsonObjectReader : IObjectReader
{
    /// <param name="fileId">Optional file id stamped on every token produced.</param>
    /// <param name="input">JSON document text. Throws (via JToken.Parse) on invalid JSON.</param>
    internal JsonObjectReader(
        Int32? fileId,
        String input)
    {
        m_fileId = fileId;
        var token = JToken.Parse(input);
        m_enumerator = GetEvents(token, true).GetEnumerator();
        // Position on the first event (DocumentStart) so Allow*/Validate* can inspect Current.
        m_enumerator.MoveNext();
    }

    /// <summary>
    /// Consumes the current event when it is a literal (null, boolean, number, or string)
    /// and returns the corresponding token; otherwise leaves the stream untouched.
    /// </summary>
    public Boolean AllowLiteral(out LiteralToken literal)
    {
        var current = m_enumerator.Current;
        switch (current.Type)
        {
            case ParseEventType.Null:
                literal = new NullToken(m_fileId, current.Line, current.Column);
                m_enumerator.MoveNext();
                return true;

            case ParseEventType.Boolean:
                literal = new BooleanToken(m_fileId, current.Line, current.Column, (Boolean)current.Value);
                m_enumerator.MoveNext();
                return true;

            case ParseEventType.Number:
                literal = new NumberToken(m_fileId, current.Line, current.Column, (Double)current.Value);
                m_enumerator.MoveNext();
                return true;

            case ParseEventType.String:
                literal = new StringToken(m_fileId, current.Line, current.Column, (String)current.Value);
                m_enumerator.MoveNext();
                return true;
        }

        literal = null;
        return false;
    }

    /// <summary>
    /// Consumes the current event when it starts a sequence (JSON array).
    /// </summary>
    public Boolean AllowSequenceStart(out SequenceToken sequence)
    {
        var current = m_enumerator.Current;
        if (current.Type == ParseEventType.SequenceStart)
        {
            sequence = new SequenceToken(m_fileId, current.Line, current.Column);
            m_enumerator.MoveNext();
            return true;
        }

        sequence = null;
        return false;
    }

    /// <summary>
    /// Consumes the current event when it ends a sequence (JSON array).
    /// </summary>
    public Boolean AllowSequenceEnd()
    {
        if (m_enumerator.Current.Type == ParseEventType.SequenceEnd)
        {
            m_enumerator.MoveNext();
            return true;
        }

        return false;
    }

    /// <summary>
    /// Consumes the current event when it starts a mapping (JSON object).
    /// </summary>
    public Boolean AllowMappingStart(out MappingToken mapping)
    {
        var current = m_enumerator.Current;
        if (current.Type == ParseEventType.MappingStart)
        {
            mapping = new MappingToken(m_fileId, current.Line, current.Column);
            m_enumerator.MoveNext();
            return true;
        }

        mapping = null;
        return false;
    }

    /// <summary>
    /// Consumes the current event when it ends a mapping (JSON object).
    /// </summary>
    public Boolean AllowMappingEnd()
    {
        if (m_enumerator.Current.Type == ParseEventType.MappingEnd)
        {
            m_enumerator.MoveNext();
            return true;
        }

        return false;
    }

    /// <summary>
    /// Consumes the final parsing event, which is expected to be DocumentEnd.
    /// Throws when anything else remains on the stream.
    /// </summary>
    public void ValidateEnd()
    {
        if (m_enumerator.Current.Type == ParseEventType.DocumentEnd)
        {
            m_enumerator.MoveNext();
            return;
        }

        throw new InvalidOperationException("Expected end of reader");
    }

    /// <summary>
    /// Consumes the first parsing event, which is expected to be DocumentStart.
    /// </summary>
    public void ValidateStart()
    {
        if (m_enumerator.Current.Type == ParseEventType.DocumentStart)
        {
            m_enumerator.MoveNext();
            return;
        }

        throw new InvalidOperationException("Expected start of reader");
    }

    /// <summary>
    /// Lazily flattens a JToken tree into parse events. Recursive, so stack depth
    /// tracks the nesting depth of the input document.
    /// </summary>
    private IEnumerable<ParseEvent> GetEvents(
        JToken token,
        Boolean root = false)
    {
        if (root)
        {
            yield return new ParseEvent(0, 0, ParseEventType.DocumentStart);
        }

        // JToken implements IJsonLineInfo; positions are 0 when line info was not captured.
        var lineInfo = token as Newtonsoft.Json.IJsonLineInfo;
        var line = lineInfo.LineNumber;
        var column = lineInfo.LinePosition;

        switch (token.Type)
        {
            case JTokenType.Null:
                yield return new ParseEvent(line, column, ParseEventType.Null, null);
                break;

            case JTokenType.Boolean:
                yield return new ParseEvent(line, column, ParseEventType.Boolean, token.ToObject<Boolean>());
                break;

            case JTokenType.Float:
            case JTokenType.Integer:
                // Both numeric JSON types are surfaced as Double, matching NumberToken.
                yield return new ParseEvent(line, column, ParseEventType.Number, token.ToObject<Double>());
                break;

            case JTokenType.String:
                yield return new ParseEvent(line, column, ParseEventType.String, token.ToObject<String>());
                break;

            case JTokenType.Array:
                yield return new ParseEvent(line, column, ParseEventType.SequenceStart);
                foreach (var item in (token as JArray))
                {
                    foreach (var e in GetEvents(item))
                    {
                        yield return e;
                    }
                }
                yield return new ParseEvent(line, column, ParseEventType.SequenceEnd);
                break;

            case JTokenType.Object:
                yield return new ParseEvent(line, column, ParseEventType.MappingStart);
                foreach (var pair in (token as JObject))
                {
                    yield return new ParseEvent(line, column, ParseEventType.String, pair.Key ?? String.Empty);
                    foreach (var e in GetEvents(pair.Value))
                    {
                        yield return e;
                    }
                }
                yield return new ParseEvent(line, column, ParseEventType.MappingEnd);
                break;

            default:
                throw new NotSupportedException($"Unexpected JTokenType {token.Type}");
        }

        if (root)
        {
            yield return new ParseEvent(0, 0, ParseEventType.DocumentEnd);
        }
    }

    /// <summary>
    /// One flattened parse event: a position, an event type, and an optional literal value.
    /// </summary>
    private struct ParseEvent
    {
        public ParseEvent(
            Int32 line,
            Int32 column,
            ParseEventType type,
            Object value = null)
        {
            Line = line;
            Column = column;
            Type = type;
            Value = value;
        }

        public readonly Int32 Line;
        public readonly Int32 Column;
        public readonly ParseEventType Type;
        public readonly Object Value;
    }

    private enum ParseEventType
    {
        None = 0,
        Null,
        Boolean,
        Number,
        String,
        SequenceStart,
        SequenceEnd,
        MappingStart,
        MappingEnd,
        DocumentStart,
        DocumentEnd,
    }

    // Both fields are assigned only in the constructor.
    private readonly IEnumerator<ParseEvent> m_enumerator;
    private readonly Int32? m_fileId;
}
}

View File

@@ -0,0 +1,445 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Expands a strategy matrix into individual strategy configurations: takes named
/// vectors, applies exclude filters, merges include extras, and yields one
/// configuration per surviving cross-product entry.
/// </summary>
internal sealed class MatrixBuilder
{
internal MatrixBuilder(
TemplateContext context,
String jobFactoryDisplayName)
{
m_context = context;
m_jobFactoryDisplayName = jobFactoryDisplayName;
}
/// <summary>
/// Adds one named matrix dimension (e.g. "os: [ubuntu, windows]").
/// </summary>
internal void AddVector(
String name,
SequenceToken vector)
{
m_vectors.Add(name, vector.ToContextData());
}
internal DictionaryContextData Vectors => m_vectors;
/// <summary>
/// Records the raw "exclude" sequence; parsed lazily in Build().
/// </summary>
internal void Exclude(SequenceToken exclude)
{
m_excludeSequence = exclude;
}
/// <summary>
/// Records the raw "include" sequence; parsed lazily in Build().
/// </summary>
internal void Include(SequenceToken include)
{
m_includeSequence = include;
}
/// <summary>
/// Iterator: lazily yields one StrategyConfiguration per cross-product entry
/// that is not excluded. Yields nothing when no vectors were added.
/// </summary>
internal IEnumerable<StrategyConfiguration> Build()
{
if (m_vectors.Count > 0)
{
// Parse includes/excludes
var include = new MatrixInclude(m_context, m_vectors, m_includeSequence);
var exclude = new MatrixExclude(m_context, m_vectors, m_excludeSequence);
// Calculate the cross product size
// ("checked" so an oversized matrix overflows loudly rather than wrapping)
var productSize = 1;
foreach (var vectorPair in m_vectors)
{
checked
{
var vector = vectorPair.Value.AssertArray("vector");
productSize *= vector.Count;
}
}
var nameBuilder = new ReferenceNameBuilder();
var displayNameBuilder = new JobDisplayNameBuilder(m_jobFactoryDisplayName);
// Cross product
for (var productIndex = 0; productIndex < productSize; productIndex++)
{
// Matrix
// Decode productIndex as a mixed-radix number: each vector takes one "digit".
var matrix = new DictionaryContextData();
var blockSize = productSize;
foreach (var vectorPair in m_vectors)
{
var vectorName = vectorPair.Key;
var vector = vectorPair.Value.AssertArray("vector");
blockSize = blockSize / vector.Count;
var vectorIndex = (productIndex / blockSize) % vector.Count;
matrix.Add(vectorName, vector[vectorIndex]);
}
// Exclude
if (exclude.Match(matrix))
{
continue;
}
// New configuration
var configuration = new StrategyConfiguration();
m_context.Memory.AddBytes(TemplateMemory.MinObjectSize);
// Gather segments for name and display name
// (only scalar values contribute; nested structures are skipped)
foreach (var matrixData in matrix.Traverse(omitKeys: true))
{
var segment = default(String);
switch (matrixData?.Type)
{
case PipelineContextDataType.Boolean:
case PipelineContextDataType.Number:
case PipelineContextDataType.String:
segment = matrixData.ToString();
break;
}
if (!String.IsNullOrEmpty(segment))
{
// Name segment
nameBuilder.AppendSegment(segment);
// Display name segment
displayNameBuilder.AppendSegment(segment);
}
}
// Name
configuration.Name = nameBuilder.Build();
m_context.Memory.AddBytes(configuration.Name);
// Display name
configuration.DisplayName = displayNameBuilder.Build();
m_context.Memory.AddBytes(configuration.DisplayName);
// Include
// (extras are merged after exclusion and naming, so they don't affect either)
if (include.Match(matrix, out var extra))
{
matrix.Add(extra);
}
// Matrix context
configuration.ContextData.Add(PipelineTemplateConstants.Matrix, matrix);
m_context.Memory.AddBytes(PipelineTemplateConstants.Matrix);
m_context.Memory.AddBytes(matrix, traverse: true);
// Add configuration
yield return configuration;
}
}
}
/// <summary>
/// Parses the "include" sequence into filters and matches them against a
/// concrete matrix entry, accumulating the extra values to merge in.
/// </summary>
private sealed class MatrixInclude
{
public MatrixInclude(
TemplateContext context,
DictionaryContextData vectors,
SequenceToken includeSequence)
{
// Convert to excludes sets
if (includeSequence?.Count > 0)
{
foreach (var includeItem in includeSequence)
{
var includeMapping = includeItem.AssertMapping("matrix includes item");
// Distinguish filters versus extra
// (keys matching a vector name are filter conditions; the rest are extras)
var filter = new MappingToken(null, null, null);
var extra = new DictionaryContextData();
foreach (var includePair in includeMapping)
{
var includeKeyLiteral = includePair.Key.AssertString("matrix include item key");
if (vectors.ContainsKey(includeKeyLiteral.Value))
{
filter.Add(includeKeyLiteral, includePair.Value);
}
else
{
extra.Add(includeKeyLiteral.Value, includePair.Value.ToContextData());
}
}
// At least one filter
if (filter.Count == 0)
{
context.Error(includeMapping, $"Matrix include mapping does not contain any filters");
continue;
}
// At least one extra
if (extra.Count == 0)
{
context.Error(includeMapping, $"Matrix include mapping does not contain any extra values to include");
continue;
}
// Add filter
m_filters.Add(new MatrixIncludeFilter(filter, extra));
}
}
}
/// <summary>
/// Returns true when any filter matched; "extra" aggregates the extras of all
/// matching filters (later filters overwrite earlier keys).
/// </summary>
public Boolean Match(
DictionaryContextData matrix,
out DictionaryContextData extra)
{
extra = default(DictionaryContextData);
foreach (var filter in m_filters)
{
if (filter.Match(matrix, out var items))
{
if (extra == null)
{
extra = new DictionaryContextData();
}
foreach (var pair in items)
{
extra[pair.Key] = pair.Value;
}
}
}
return extra != null;
}
private readonly List<MatrixIncludeFilter> m_filters = new List<MatrixIncludeFilter>();
}
/// <summary>
/// One include filter plus the extra values to merge when it matches.
/// </summary>
private sealed class MatrixIncludeFilter : MatrixFilter
{
public MatrixIncludeFilter(
MappingToken filter,
DictionaryContextData extra)
: base(filter)
{
m_extra = extra;
}
public Boolean Match(
DictionaryContextData matrix,
out DictionaryContextData extra)
{
if (base.Match(matrix))
{
extra = m_extra;
return true;
}
extra = null;
return false;
}
private readonly DictionaryContextData m_extra;
}
/// <summary>
/// Parses the "exclude" sequence into filters and tests matrix entries against them.
/// </summary>
private sealed class MatrixExclude
{
public MatrixExclude(
TemplateContext context,
DictionaryContextData vectors,
SequenceToken excludeSequence)
{
// Convert to excludes sets
if (excludeSequence?.Count > 0)
{
foreach (var excludeItem in excludeSequence)
{
var excludeMapping = excludeItem.AssertMapping("matrix excludes item");
// Check empty
if (excludeMapping.Count == 0)
{
context.Error(excludeMapping, $"Matrix exclude filter must not be empty");
continue;
}
// Validate first-level keys
// (an unknown key is reported but the filter is still added below)
foreach (var excludePair in excludeMapping)
{
var excludeKey = excludePair.Key.AssertString("matrix excludes item key");
if (!vectors.ContainsKey(excludeKey.Value))
{
context.Error(excludeKey, $"Matrix exclude key '{excludeKey.Value}' does not match any key within the matrix");
continue;
}
}
// Add filter
m_filters.Add(new MatrixExcludeFilter(excludeMapping));
}
}
}
/// <summary>
/// Returns true when any exclude filter matches the matrix entry.
/// </summary>
public Boolean Match(DictionaryContextData matrix)
{
foreach (var filter in m_filters)
{
if (filter.Match(matrix))
{
return true;
}
}
return false;
}
private readonly List<MatrixExcludeFilter> m_filters = new List<MatrixExcludeFilter>();
}
private sealed class MatrixExcludeFilter : MatrixFilter
{
public MatrixExcludeFilter(MappingToken filter)
: base(filter)
{
}
// Re-exposes the protected base match as public for MatrixExclude.
public new Boolean Match(DictionaryContextData matrix)
{
return base.Match(matrix);
}
}
/// <summary>
/// Compiles a filter mapping into equality expressions over the "matrix" context
/// (e.g. matrix['os'] == 'ubuntu'); a matrix entry matches when ALL expressions are truthy.
/// </summary>
private abstract class MatrixFilter
{
protected MatrixFilter(MappingToken matrixFilter)
{
// Iterative depth-first walk over (possibly nested) filter mappings,
// using MappingState as an explicit stack.
var state = new MappingState(null, matrixFilter);
while (state != null)
{
if (state.MoveNext())
{
var value = state.Mapping[state.Index].Value;
if (value is LiteralToken literal)
{
AddExpression(state, literal);
}
else
{
var mapping = state.Mapping[state.Index].Value.AssertMapping("matrix filter");
state = new MappingState(state, mapping);
}
}
else
{
state = state.Parent;
}
}
}
protected Boolean Match(DictionaryContextData matrix)
{
if (matrix.Count == 0)
{
throw new InvalidOperationException("Matrix filter cannot be empty");
}
foreach (var expression in m_expressions)
{
var result = expression.Evaluate(null, null, matrix, null);
if (result.IsFalsy)
{
return false;
}
}
return true;
}
/// <summary>
/// Converts one literal filter value into an equality expression against the
/// current path (e.g. matrix['os'] == 'ubuntu') and stores the parsed tree.
/// </summary>
private void AddExpression(
MappingState state,
LiteralToken literal)
{
var expressionLiteral = default(String);
switch (literal.Type)
{
case TokenType.Null:
expressionLiteral = ExpressionConstants.Null;
break;
case TokenType.Boolean:
var booleanToken = literal as BooleanToken;
expressionLiteral = booleanToken.Value ? ExpressionConstants.True : ExpressionConstants.False;
break;
case TokenType.Number:
var numberToken = literal as NumberToken;
expressionLiteral = String.Format(CultureInfo.InvariantCulture, ExpressionConstants.NumberFormat, numberToken.Value);
break;
case TokenType.String:
var stringToken = literal as StringToken;
expressionLiteral = $"'{ExpressionUtility.StringEscape(stringToken.Value)}'";
break;
default:
throw new NotSupportedException($"Unexpected literal type '{literal.Type}'");
}
var str = $"{state.Path} == {expressionLiteral}";
var parser = new ExpressionParser();
var expression = parser.CreateTree(str, null, s_matrixFilterNamedValues, null);
m_expressions.Add(expression);
}
private static readonly INamedValueInfo[] s_matrixFilterNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<MatrixNamedValue>(PipelineTemplateConstants.Matrix),
};
private readonly List<IExpressionNode> m_expressions = new List<IExpressionNode>();
}
/// <summary>
/// Stack frame for the iterative filter walk: a mapping, the current index
/// within it, and the indexer path built from the root ("matrix['a']['b']").
/// </summary>
private sealed class MappingState
{
public MappingState(
MappingState parent,
MappingToken mapping)
{
Parent = parent;
Mapping = mapping;
Index = -1;
}
// Advances to the next pair; updates Path for the new key. False when exhausted.
public Boolean MoveNext()
{
if (++Index < Mapping.Count)
{
var keyLiteral = Mapping[Index].Key.AssertString("matrix filter key");
var parentPath = Parent?.Path ?? PipelineTemplateConstants.Matrix;
Path = $"{parentPath}['{ExpressionUtility.StringEscape(keyLiteral.Value)}']";
return true;
}
else
{
return false;
}
}
public MappingState Parent;
public MappingToken Mapping;
public Int32 Index;
public String Path;
}
/// <summary>
/// Named value "matrix" used by filter expressions; resolves to the matrix
/// entry passed as expression state.
/// </summary>
private sealed class MatrixNamedValue : NamedValue
{
protected override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
{
resultMemory = null;
return context.State;
}
}
private readonly TemplateContext m_context;
private readonly String m_jobFactoryDisplayName;
private readonly DictionaryContextData m_vectors = new DictionaryContextData();
private SequenceToken m_excludeSequence;
private SequenceToken m_includeSequence;
}
}

View File

@@ -0,0 +1,45 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Reflection;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Limits applied when parsing a pipeline. Some limits are fixed (computed
/// properties); others are settable and preserved by the copy constructor.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class ParseOptions
{
/// <summary>
/// Creates options with the default limits.
/// </summary>
public ParseOptions()
{
}
/// <summary>
/// Copy constructor. Copies only the settable limits; the fixed limits are
/// compile-time values on every instance.
/// </summary>
internal ParseOptions(ParseOptions copy)
{
MaxFiles = copy.MaxFiles;
MaxFileSize = copy.MaxFileSize;
MaxResultSize = copy.MaxResultSize;
}
/// <summary>
/// Gets the maximum depth allowed when parsing a pipeline.
/// </summary>
public Int32 MaxDepth => 50;
/// <summary>
/// Gets the maximum error message length before the message will be truncated.
/// </summary>
public Int32 MaxErrorMessageLength => 500;
/// <summary>
/// Gets the maximum number of errors that can be recorded when parsing a pipeline.
/// </summary>
public Int32 MaxErrors => 10;
/// <summary>
/// Gets or sets the maximum number of files that can be loaded when parsing a pipeline. Zero or less is treated as infinite.
/// </summary>
public Int32 MaxFiles { get; set; } = 50;
/// <summary>
/// Gets or sets the maximum size of a single file, in bytes.
/// </summary>
public Int32 MaxFileSize { get; set; } = 1024 * 1024; // 1 mb
/// <summary>
/// Gets the maximum number of parse events that can be processed.
/// </summary>
public Int32 MaxParseEvents => 1000000; // 1 million
/// <summary>
/// Gets or sets the maximum size of the parse result, in bytes.
/// </summary>
public Int32 MaxResultSize { get; set; } = 10 * 1024 * 1024; // 10 mb
}
}

View File

@@ -0,0 +1,30 @@
using System;
using System.IO;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Outcome of parsing a pipeline template: the template context used during
/// parsing and the resulting token (null when nothing was produced).
/// </summary>
internal sealed class ParseResult
{
    public TemplateContext Context { get; set; }

    public TemplateToken Value { get; set; }

    /// <summary>
    /// Serializes Value as YAML. Returns null when there is no value.
    /// </summary>
    public String ToYaml()
    {
        if (Value == null)
        {
            return null;
        }

        using (var writer = new StringWriter())
        {
            TemplateWriter.Write(new YamlObjectWriter(writer), Value);
            writer.Flush();
            return writer.ToString();
        }
    }
}
}

View File

@@ -0,0 +1,82 @@
using System;
using System.ComponentModel;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// String constants used by pipeline template parsing/evaluation: YAML keys,
/// schema definition names, context names, and related well-known strings.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class PipelineTemplateConstants
{
public const String Always = "always";
public const String BooleanStepsContext = "boolean-steps-context";
public const String CancelTimeoutMinutes = "cancel-timeout-minutes";
public const String Cancelled = "cancelled";
public const String Checkout = "checkout";
public const String Clean = "clean";
public const String Container = "container";
public const String ContinueOnError = "continue-on-error";
public const String Env = "env";
public const String Event = "event";
public const String EventPattern = "github.event";
public const String Exclude = "exclude";
public const String FailFast = "fail-fast";
public const String Failure = "failure";
public const String FetchDepth = "fetch-depth";
public const String GeneratedId = "generated-id";
public const String GitHub = "github";
public const String Id = "id";
public const String If = "if";
public const String Image = "image";
public const String Include = "include";
public const String Inputs = "inputs";
public const String Job = "job";
public const String Jobs = "jobs";
public const String Lfs = "lfs";
public const String Matrix = "matrix";
public const String MaxParallel = "max-parallel";
public const String Name = "name";
public const String Needs = "needs";
public const String NumberStepsContext = "number-steps-context";
public const String NumberStrategyContext = "number-strategy-context";
public const String On = "on";
public const String Options = "options";
public const String Outputs = "outputs";
public const String OutputsPattern = "needs.*.outputs";
public const String Path = "path";
public const String Pool = "pool";
public const String Ports = "ports";
public const String Result = "result";
public const String RunDisplayPrefix = "Run ";
public const String Run = "run";
public const String Runner = "runner";
public const String RunsOn = "runs-on";
public const String Scope = "scope";
public const String Scopes = "scopes";
public const String Secrets = "secrets";
public const String Services = "services";
public const String Shell = "shell";
public const String Skipped = "skipped";
public const String StepEnv = "step-env";
public const String Steps = "steps";
public const String StepsScopeInputs = "steps-scope-inputs";
public const String StepsScopeOutputs = "steps-scope-outputs";
public const String StepsTemplateRoot = "steps-template-root";
public const String StepWith = "step-with";
public const String Strategy = "strategy";
public const String StringStepsContext = "string-steps-context";
public const String StringStrategyContext = "string-strategy-context";
public const String Submodules = "submodules";
public const String Success = "success";
public const String Template = "template";
public const String TimeoutMinutes = "timeout-minutes";
public const String Token = "token";
public const String Uses = "uses";
public const String VmImage = "vmImage";
public const String Volumes = "volumes";
public const String With = "with";
public const String Workflow = "workflow";
public const String Workflow_1_0 = "workflow-v1.0";
public const String WorkflowRoot = "workflow-root";
public const String WorkingDirectory = "working-directory";
public const String Workspace = "workspace";
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,526 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Threading;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Schema;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using ExpressionConstants = GitHub.DistributedTask.Expressions2.ExpressionConstants;
using ITraceWriter = GitHub.DistributedTask.ObjectTemplating.ITraceWriter;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
[EditorBrowsable(EditorBrowsableState.Never)]
public class PipelineTemplateEvaluator
{
/// <summary>
/// Creates an evaluator for the given schema.
/// </summary>
/// <param name="trace">Trace writer used during evaluation.</param>
/// <param name="schema">Template schema; must be version "workflow-v1.0".</param>
/// <exception cref="NotSupportedException">The schema version is not "workflow-v1.0".</exception>
public PipelineTemplateEvaluator(
ITraceWriter trace,
TemplateSchema schema)
{
if (!String.Equals(schema.Version, PipelineTemplateConstants.Workflow_1_0, StringComparison.Ordinal))
{
throw new NotSupportedException($"Unexpected template schema version '{schema.Version}'");
}
m_trace = trace;
m_schema = schema;
}
/// <summary>
/// Gets the maximum depth allowed during evaluation.
/// </summary>
public Int32 MaxDepth => 50;
/// <summary>
/// Gets the maximum error message length before the message will be truncated.
/// </summary>
public Int32 MaxErrorMessageLength => 500;
/// <summary>
/// Gets the maximum number of errors that can be recorded when parsing a pipeline.
/// </summary>
public Int32 MaxErrors => 10;
/// <summary>
/// Gets the maximum number of events that can be processed during evaluation.
/// </summary>
public Int32 MaxEvents => 1000000; // 1 million
/// <summary>
/// Gets or sets the maximum size of an evaluation result, in bytes.
/// </summary>
public Int32 MaxResultSize { get; set; } = 10 * 1024 * 1024; // 10 mb
/// <summary>
/// Evaluates the "strategy" token and converts it to a StrategyResult. When
/// evaluation produces no configurations (or the token is null/absent), a single
/// default configuration is added so callers always receive at least one, with
/// null "matrix" context and a one-job "strategy" context.
/// </summary>
public StrategyResult EvaluateStrategy(
TemplateToken token,
DictionaryContextData contextData,
String jobFactoryDisplayName)
{
var result = new StrategyResult();
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.Strategy, token, 0, null, omitHeader: true);
context.Errors.Check();
result = PipelineTemplateConverter.ConvertToStrategy(context, token, jobFactoryDisplayName);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
// Non-validation failures are recorded as template errors...
context.Errors.Add(ex);
}
// ...and surfaced here (throws when any error was recorded).
context.Errors.Check();
}
if (result.Configurations.Count == 0)
{
var configuration = new StrategyConfiguration
{
Name = PipelineConstants.DefaultJobName,
DisplayName = new JobDisplayNameBuilder(jobFactoryDisplayName).Build(),
};
configuration.ContextData.Add(PipelineTemplateConstants.Matrix, null);
configuration.ContextData.Add(
PipelineTemplateConstants.Strategy,
new DictionaryContextData
{
{
"fail-fast",
new BooleanContextData(result.FailFast)
},
{
"job-index",
new NumberContextData(0)
},
{
"job-total",
new NumberContextData(1)
},
{
"max-parallel",
new NumberContextData(1)
}
});
result.Configurations.Add(configuration);
}
return result;
}
/// <summary>
/// Evaluates the job display-name token; returns the default when the token is
/// absent or evaluation yields an empty name.
/// </summary>
public String EvaluateJobDisplayName(
    TemplateToken token,
    DictionaryContextData contextData,
    String defaultDisplayName)
{
    if (token == null || token.Type == TokenType.Null)
    {
        return defaultDisplayName;
    }

    var context = CreateContext(contextData);
    var displayName = default(String);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StringStrategyContext, token, 0, null, omitHeader: true);
        context.Errors.Check();
        displayName = PipelineTemplateConverter.ConvertToJobDisplayName(context, evaluated);
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        context.Errors.Add(ex);
    }
    context.Errors.Check();
    return !String.IsNullOrEmpty(displayName) ? displayName : defaultDisplayName;
}
/// <summary>
/// Evaluates the "runs-on" token into a job target. Throws when the token is
/// absent or evaluation yields no target.
/// </summary>
public PhaseTarget EvaluateJobTarget(
    TemplateToken token,
    DictionaryContextData contextData)
{
    if (token == null || token.Type == TokenType.Null)
    {
        throw new InvalidOperationException("Job target cannot be null");
    }

    var context = CreateContext(contextData);
    var target = default(PhaseTarget);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.RunsOn, token, 0, null, omitHeader: true);
        context.Errors.Check();
        target = PipelineTemplateConverter.ConvertToJobTarget(context, evaluated);
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        context.Errors.Add(ex);
    }
    context.Errors.Check();
    return target ?? throw new InvalidOperationException("Job target cannot be null");
}
/// <summary>
/// Evaluates the job timeout token; falls back to the default when the token is
/// absent or evaluation yields nothing.
/// </summary>
public Int32 EvaluateJobTimeout(
    TemplateToken token,
    DictionaryContextData contextData)
{
    if (token == null || token.Type == TokenType.Null)
    {
        return PipelineConstants.DefaultJobTimeoutInMinutes;
    }

    var context = CreateContext(contextData);
    var timeout = default(Int32?);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.NumberStrategyContext, token, 0, null, omitHeader: true);
        context.Errors.Check();
        timeout = PipelineTemplateConverter.ConvertToJobTimeout(context, evaluated);
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        context.Errors.Add(ex);
    }
    context.Errors.Check();
    return timeout ?? PipelineConstants.DefaultJobTimeoutInMinutes;
}
/// <summary>
/// Evaluates the job cancel-timeout token; falls back to the default when the
/// token is absent or evaluation yields nothing.
/// </summary>
public Int32 EvaluateJobCancelTimeout(
    TemplateToken token,
    DictionaryContextData contextData)
{
    if (token == null || token.Type == TokenType.Null)
    {
        return PipelineConstants.DefaultJobCancelTimeoutInMinutes;
    }

    var context = CreateContext(contextData);
    var cancelTimeout = default(Int32?);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.NumberStrategyContext, token, 0, null, omitHeader: true);
        context.Errors.Check();
        cancelTimeout = PipelineTemplateConverter.ConvertToJobCancelTimeout(context, evaluated);
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        context.Errors.Add(ex);
    }
    context.Errors.Check();
    return cancelTimeout ?? PipelineConstants.DefaultJobCancelTimeoutInMinutes;
}
/// <summary>
/// Evaluates step-scope inputs; returns an empty dictionary when the token is
/// absent or evaluation yields nothing.
/// </summary>
public DictionaryContextData EvaluateStepScopeInputs(
    TemplateToken token,
    DictionaryContextData contextData)
{
    if (token == null || token.Type == TokenType.Null)
    {
        return new DictionaryContextData();
    }

    var context = CreateContext(contextData);
    var inputs = default(DictionaryContextData);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepsScopeInputs, token, 0, null, omitHeader: true);
        context.Errors.Check();
        inputs = evaluated.ToContextData().AssertDictionary("steps scope inputs");
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        context.Errors.Add(ex);
    }
    context.Errors.Check();
    return inputs ?? new DictionaryContextData();
}
/// <summary>
/// Evaluates step-scope outputs; returns an empty dictionary when the token is
/// absent or evaluation yields nothing.
/// </summary>
public DictionaryContextData EvaluateStepScopeOutputs(
    TemplateToken token,
    DictionaryContextData contextData)
{
    if (token == null || token.Type == TokenType.Null)
    {
        return new DictionaryContextData();
    }

    var context = CreateContext(contextData);
    var outputs = default(DictionaryContextData);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepsScopeOutputs, token, 0, null, omitHeader: true);
        context.Errors.Check();
        outputs = evaluated.ToContextData().AssertDictionary("steps scope outputs");
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        context.Errors.Add(ex);
    }
    context.Errors.Check();
    return outputs ?? new DictionaryContextData();
}
/// <summary>
/// Evaluates the step continue-on-error token; defaults to false when the token
/// is absent or evaluation yields nothing.
/// </summary>
public Boolean EvaluateStepContinueOnError(
    TemplateToken token,
    DictionaryContextData contextData)
{
    if (token == null || token.Type == TokenType.Null)
    {
        return false;
    }

    var context = CreateContext(contextData);
    var continueOnError = default(Boolean?);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.BooleanStepsContext, token, 0, null, omitHeader: true);
        context.Errors.Check();
        continueOnError = PipelineTemplateConverter.ConvertToStepContinueOnError(context, evaluated);
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        context.Errors.Add(ex);
    }
    context.Errors.Check();
    return continueOnError ?? false;
}
/// <summary>
/// Evaluates the step "env" token; returns an empty dictionary (using the given
/// key comparer) when the token is absent or evaluation yields nothing.
/// </summary>
public Dictionary<String, String> EvaluateStepEnvironment(
    TemplateToken token,
    DictionaryContextData contextData,
    StringComparer keyComparer)
{
    if (token == null || token.Type == TokenType.Null)
    {
        return new Dictionary<String, String>(keyComparer);
    }

    var context = CreateContext(contextData);
    var environment = default(Dictionary<String, String>);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepEnv, token, 0, null, omitHeader: true);
        context.Errors.Check();
        environment = PipelineTemplateConverter.ConvertToStepEnvironment(context, evaluated, keyComparer);
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        context.Errors.Add(ex);
    }
    context.Errors.Check();
    return environment ?? new Dictionary<String, String>(keyComparer);
}
/// <summary>
/// Evaluates the step "with" token; returns an empty case-insensitive dictionary
/// when the token is absent or evaluation yields nothing.
/// </summary>
public Dictionary<String, String> EvaluateStepInputs(
    TemplateToken token,
    DictionaryContextData contextData)
{
    if (token == null || token.Type == TokenType.Null)
    {
        return new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase);
    }

    var context = CreateContext(contextData);
    var inputs = default(Dictionary<String, String>);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepWith, token, 0, null, omitHeader: true);
        context.Errors.Check();
        inputs = PipelineTemplateConverter.ConvertToStepInputs(context, evaluated);
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        context.Errors.Add(ex);
    }
    context.Errors.Check();
    return inputs ?? new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase);
}
/// <summary>
/// Evaluates the step timeout token; returns 0 when the token is absent or
/// evaluation yields nothing.
/// </summary>
public Int32 EvaluateStepTimeout(
    TemplateToken token,
    DictionaryContextData contextData)
{
    if (token == null || token.Type == TokenType.Null)
    {
        return 0;
    }

    var context = CreateContext(contextData);
    var timeout = default(Int32?);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.NumberStepsContext, token, 0, null, omitHeader: true);
        context.Errors.Check();
        timeout = PipelineTemplateConverter.ConvertToStepTimeout(context, evaluated);
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        context.Errors.Add(ex);
    }
    context.Errors.Check();
    return timeout ?? 0;
}
/// <summary>
/// Evaluates the job "container" token; returns the default (no container) when
/// the token is absent or evaluation yields nothing.
/// </summary>
public JobContainer EvaluateJobContainer(
    TemplateToken token,
    DictionaryContextData contextData)
{
    if (token == null || token.Type == TokenType.Null)
    {
        return default(JobContainer);
    }

    var context = CreateContext(contextData);
    var container = default(JobContainer);
    try
    {
        var evaluated = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.Container, token, 0, null, omitHeader: true);
        context.Errors.Check();
        container = PipelineTemplateConverter.ConvertToJobContainer(context, evaluated);
    }
    catch (Exception ex) when (!(ex is TemplateValidationException))
    {
        context.Errors.Add(ex);
    }
    context.Errors.Check();
    return container;
}
/// <summary>
/// Evaluates the service containers of a job.
/// Returns null when the token is null/absent or evaluation fails.
/// </summary>
public IList<KeyValuePair<String, JobContainer>> EvaluateJobServiceContainers(
    TemplateToken token,
    DictionaryContextData contextData)
{
    var services = default(List<KeyValuePair<String, JobContainer>>);
    var hasToken = token != null && token.Type != TokenType.Null;
    if (hasToken)
    {
        var templateContext = CreateContext(contextData);
        try
        {
            var evaluated = TemplateEvaluator.Evaluate(templateContext, PipelineTemplateConstants.Services, token, 0, null, omitHeader: true);
            templateContext.Errors.Check();
            services = PipelineTemplateConverter.ConvertToJobServiceContainers(templateContext, evaluated);
        }
        catch (Exception ex) when (!(ex is TemplateValidationException))
        {
            // Record unexpected failures so they surface as template validation errors
            templateContext.Errors.Add(ex);
        }
        templateContext.Errors.Check();
    }
    return services;
}
/// <summary>
/// Attempts to evaluate a step display name. Returns false when the token is a basic
/// expression that references named values or functions not yet available in the
/// current context, so the caller can fall back to a default name.
/// </summary>
/// <param name="token">The display-name token; may be null</param>
/// <param name="contextData">Available expression values</param>
/// <param name="stepName">The evaluated display name, or null</param>
public Boolean TryEvaluateStepDisplayName(
    TemplateToken token,
    DictionaryContextData contextData,
    out String stepName)
{
    stepName = default(String);
    var context = CreateContext(contextData);
    if (token != null && token.Type != TokenType.Null)
    {
        // We should only evaluate basic expressions if we are sure we have context on all the
        // named values and functions. Otherwise return false and use a default name.
        if (token is BasicExpressionToken expressionToken)
        {
            ExpressionNode root = null;
            try
            {
                root = new ExpressionParser().ValidateSyntax(expressionToken.Expression, null) as ExpressionNode;
            }
            catch (Exception exception)
            {
                context.Errors.Add(exception);
                context.Errors.Check();
            }

            // Fix: guard against a null parse result. Previously root.Traverse() could
            // throw a NullReferenceException when ValidateSyntax returned a non-node
            // result without raising an exception.
            if (root == null)
            {
                return false;
            }

            foreach (var node in root.Traverse())
            {
                if (node is NamedValue namedValue && !contextData.ContainsKey(namedValue.Name))
                {
                    return false;
                }
                // Fix: function names are case-insensitive; the previous ordinal
                // case-sensitive comparison could reject a resolvable function.
                else if (node is Function function &&
                    !context.ExpressionFunctions.Any(item => String.Equals(item.Name, function.Name, StringComparison.OrdinalIgnoreCase)) &&
                    !ExpressionConstants.WellKnownFunctions.ContainsKey(function.Name))
                {
                    return false;
                }
            }
        }

        try
        {
            token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StringStepsContext, token, 0, null, omitHeader: true);
            context.Errors.Check();
            stepName = PipelineTemplateConverter.ConvertToStepDisplayName(context, token);
        }
        catch (Exception ex) when (!(ex is TemplateValidationException))
        {
            context.Errors.Add(ex);
        }
        context.Errors.Check();
    }
    return true;
}
/// <summary>
/// Creates a template context seeded with the supplied expression values.
/// Well-known context names are always defined (null when not supplied) —
/// compat for a new agent running against an old server.
/// </summary>
private TemplateContext CreateContext(DictionaryContextData contextData)
{
    var context = new TemplateContext
    {
        CancellationToken = CancellationToken.None,
        Errors = new TemplateValidationErrors(MaxErrors, MaxErrorMessageLength),
        Memory = new TemplateMemory(
            maxDepth: MaxDepth,
            maxEvents: MaxEvents,
            maxBytes: MaxResultSize),
        Schema = m_schema,
        TraceWriter = m_trace,
    };

    if (contextData != null)
    {
        foreach (var pair in contextData)
        {
            context.ExpressionValues[pair.Key] = pair.Value;
        }
    }

    // Compat for new agent against old server
    foreach (var name in s_contextNames)
    {
        if (!context.ExpressionValues.ContainsKey(name))
        {
            context.ExpressionValues[name] = null;
        }
    }

    return context;
}
private readonly ITraceWriter m_trace;
private readonly TemplateSchema m_schema;

// Well-known context names that are always defined (possibly null) during evaluation.
// Fix: declared static readonly to match the s_ naming prefix and avoid allocating
// an identical array per instance.
private static readonly String[] s_contextNames = new[]
{
    PipelineTemplateConstants.GitHub,
    PipelineTemplateConstants.Strategy,
    PipelineTemplateConstants.Matrix,
    PipelineTemplateConstants.Secrets,
    PipelineTemplateConstants.Steps,
    PipelineTemplateConstants.Inputs,
    PipelineTemplateConstants.Job,
    PipelineTemplateConstants.Runner,
    PipelineTemplateConstants.Env,
};
}
}

View File

@@ -0,0 +1,239 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.IO;
using System.Threading;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.ObjectTemplating.Schema;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
using GitHub.DistributedTask.ObjectTemplating;
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class PipelineTemplateParser
{
    static PipelineTemplateParser()
    {
        var schemaFactory = new PipelineTemplateSchemaFactory();
        s_schema = schemaFactory.CreateSchema();
    }

    public PipelineTemplateParser(
        ITraceWriter trace,
        ParseOptions options)
    {
        m_trace = trace ?? throw new ArgumentNullException(nameof(trace));
        m_parseOptions = new ParseOptions(options ?? throw new ArgumentNullException(nameof(options)));
    }

    /// <summary>
    /// Loads the YAML pipeline template
    /// </summary>
    /// <exception cref="FileNotFoundException">Thrown when the entry YAML file does not exist</exception>
    public PipelineTemplate LoadPipeline(
        IFileProvider fileProvider,
        RepositoryResource self,
        String path,
        CancellationToken cancellationToken)
    {
        fileProvider = fileProvider ?? throw new ArgumentNullException(nameof(fileProvider));
        self = self ?? throw new ArgumentNullException(nameof(self));
        var parseResult = LoadPipelineInternal(fileProvider, path, cancellationToken);
        return PipelineTemplateConverter.ConvertToPipeline(parseResult.Context, self, parseResult.Value);
    }

    /// <summary>
    /// Loads the entry YAML file and resolves any referenced step templates.
    /// Errors are accumulated on the returned result's context.
    /// </summary>
    internal ParseResult LoadPipelineInternal(
        IFileProvider fileProvider,
        String path,
        CancellationToken cancellationToken)
    {
        // Setup the context
        var templateLoader = new YamlTemplateLoader(new ParseOptions(m_parseOptions), fileProvider);
        var context = new TemplateContext
        {
            CancellationToken = cancellationToken,
            Errors = new TemplateValidationErrors(m_parseOptions.MaxErrors, m_parseOptions.MaxErrorMessageLength),
            Memory = new TemplateMemory(
                maxDepth: m_parseOptions.MaxDepth,
                maxEvents: m_parseOptions.MaxParseEvents,
                maxBytes: m_parseOptions.MaxResultSize),
            Schema = s_schema,
            TraceWriter = m_trace,
        };

        // Load the entry file
        var token = default(TemplateToken);
        try
        {
            token = templateLoader.LoadFile(context, null, null, path, PipelineTemplateConstants.WorkflowRoot);
        }
        catch (Exception ex)
        {
            context.Errors.Add(ex);
        }

        var result = new ParseResult
        {
            Context = context,
            Value = token,
        };

        if (token != null && context.Errors.Count == 0)
        {
            var templateReferenceCount = ResolveWorkflowTemplateReferences(context, templateLoader, token);

            // Trace the final document when at least one template reference was resolved
            if (templateReferenceCount > 0 && context.Errors.Count == 0)
            {
                context.TraceWriter.Info(String.Empty);
                context.TraceWriter.Info("# ");
                context.TraceWriter.Info("# Template resolution complete. Final runtime YAML document:");
                context.TraceWriter.Info("# ");
                context.TraceWriter.Info("{0}", result.ToYaml());
            }
        }

        return result;
    }

    // Resolves step template references within each job. Returns the number resolved.
    private Int32 ResolveWorkflowTemplateReferences(
        TemplateContext context,
        YamlTemplateLoader templateLoader,
        TemplateToken token)
    {
        var resolvedCount = 0;
        var workflow = token.AssertMapping("workflow");
        foreach (var workflowProperty in workflow)
        {
            var workflowPropertyName = workflowProperty.Key.AssertString("workflow property");
            switch (workflowPropertyName.Value)
            {
                // Fix: collapsed the two identical switch arms; both properties
                // contain the jobs collection and were handled identically.
                case PipelineTemplateConstants.Jobs:
                case PipelineTemplateConstants.Workflow:
                    resolvedCount += ResolveJobsTemplateReferences(context, templateLoader, workflowProperty.Value);
                    break;
            }
        }
        return resolvedCount;
    }

    // Resolves step template references for every job in the mapping, recording
    // a "scopes" property on any job that resolved at least one template.
    private Int32 ResolveJobsTemplateReferences(
        TemplateContext context,
        YamlTemplateLoader templateLoader,
        TemplateToken token)
    {
        var resolvedCount = 0;
        var jobs = token.AssertMapping("jobs");
        foreach (var jobsProperty in jobs)
        {
            var job = jobsProperty.Value.AssertMapping("jobs property value");
            var scopes = new SequenceToken(null, null, null);
            foreach (var jobProperty in job)
            {
                var jobPropertyName = jobProperty.Key.AssertString("job property name");
                switch (jobPropertyName.Value)
                {
                    case PipelineTemplateConstants.Steps:
                        resolvedCount += ResolveStepsTemplateReferences(context, templateLoader, jobProperty.Value, scopes);
                        break;
                }
            }

            if (scopes.Count > 0)
            {
                var scopesPropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Scopes);
                job.Add(scopesPropertyName, scopes);
                context.Memory.AddBytes(scopesPropertyName);
                context.Memory.AddBytes(scopes); // Do not traverse, nested objects already accounted for
            }
        }
        return resolvedCount;
    }

    // Replaces each step template reference with the referenced template's steps,
    // adding a scope entry for each resolved reference. Stops on the first error.
    private Int32 ResolveStepsTemplateReferences(
        TemplateContext context,
        YamlTemplateLoader templateLoader,
        TemplateToken token,
        SequenceToken scopes)
    {
        var resolvedCount = 0;
        var steps = token.AssertSequence("steps");
        var stepIndex = 0;
        while (stepIndex < steps.Count && context.Errors.Count == 0)
        {
            var step = steps[stepIndex].AssertMapping("step");
            if (!TemplateReference.TryCreate(step, out var reference))
            {
                stepIndex++;
                continue;
            }

            resolvedCount++;
            var template = templateLoader.LoadFile(
                context,
                reference.TemplatePath.FileId,
                reference.TemplateScope,
                reference.TemplatePath.Value,
                PipelineTemplateConstants.StepsTemplateRoot);
            if (context.Errors.Count != 0)
            {
                break;
            }

            var scope = reference.CreateScope(context, template);
            if (context.Errors.Count != 0)
            {
                break;
            }

            // Remove the template reference and memory overhead
            steps.RemoveAt(stepIndex);
            context.Memory.SubtractBytes(step, true); // Traverse

            // Remove the template memory overhead
            context.Memory.SubtractBytes(template, true); // Traverse

            var templateSteps = GetSteps(template);
            if (templateSteps?.Count > 0)
            {
                // Add the steps from the template
                steps.InsertRange(stepIndex, templateSteps);
                context.Memory.AddBytes(templateSteps, true); // Traverse
                context.Memory.SubtractBytes(templateSteps, false);

                // Add the scope
                scopes.Add(scope);
                context.Memory.AddBytes(scope, true); // Traverse
            }
        }
        return resolvedCount;
    }

    // Returns the "steps" sequence from a steps template, or null when absent.
    private SequenceToken GetSteps(TemplateToken template)
    {
        var mapping = template.AssertMapping("steps template");
        foreach (var property in mapping)
        {
            var propertyName = property.Key.AssertString("steps template property name");
            switch (propertyName.Value)
            {
                case PipelineTemplateConstants.Steps:
                    return property.Value.AssertSequence("steps template steps property value");
            }
        }
        return null;
    }

    // Fix: the schema is only assigned in the static constructor; declare it readonly.
    private static readonly TemplateSchema s_schema;
    private readonly ParseOptions m_parseOptions;
    private readonly ITraceWriter m_trace;
}
}

View File

@@ -0,0 +1,26 @@
using System;
using System.ComponentModel;
using System.IO;
using System.Reflection;
using GitHub.DistributedTask.ObjectTemplating.Schema;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class PipelineTemplateSchemaFactory
{
    /// <summary>
    /// Loads the workflow template schema from the embedded JSON resource.
    /// </summary>
    public TemplateSchema CreateSchema()
    {
        var assembly = Assembly.GetExecutingAssembly();
        var json = default(String);
        using (var stream = assembly.GetManifestResourceStream(c_resourceName))
        {
            // Fix: fail with an actionable message instead of an opaque
            // NullReferenceException when the embedded resource is missing
            // (e.g. due to a packaging error)
            if (stream == null)
            {
                throw new InvalidOperationException($"Embedded resource '{c_resourceName}' was not found");
            }

            using (var streamReader = new StreamReader(stream))
            {
                json = streamReader.ReadToEnd();
            }
        }

        var objectReader = new JsonObjectReader(null, json);
        return TemplateSchema.Load(objectReader);
    }

    private const String c_resourceName = "GitHub.DistributedTask.Pipelines.ObjectTemplating.workflow-v1.0.json";
}
}

View File

@@ -0,0 +1,121 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Text;
using GitHub.DistributedTask.Pipelines.Validation;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
internal sealed class ReferenceNameBuilder
{
    /// <summary>
    /// Appends a segment to the name being built, sanitizing characters so the
    /// result is a legal identifier (alphanumeric, '_', '-'; must not start
    /// with a digit or '-'). Segments are joined with '_'.
    /// </summary>
    internal void AppendSegment(String value)
    {
        if (String.IsNullOrEmpty(value))
        {
            return;
        }

        if (m_name.Length == 0)
        {
            var first = value[0];
            if ((first >= 'a' && first <= 'z') ||
                (first >= 'A' && first <= 'Z') ||
                first == '_')
            {
                // Legal first char
            }
            else if ((first >= '0' && first <= '9') || first == '-')
            {
                // Illegal first char, but legal char.
                // Prepend "_".
                m_name.Append("_");
            }
            else
            {
                // Illegal char; the loop below replaces it with "_"
            }
        }
        else
        {
            // Separator
            m_name.Append(c_separator);
        }

        foreach (var c in value)
        {
            if ((c >= 'a' && c <= 'z') ||
                (c >= 'A' && c <= 'Z') ||
                (c >= '0' && c <= '9') ||
                c == '_' ||
                c == '-')
            {
                // Legal
                m_name.Append(c);
            }
            else
            {
                // Illegal
                m_name.Append("_");
            }
        }
    }

    /// <summary>
    /// Produces a unique name from the accumulated segments (or "job" when empty),
    /// truncating to the max node-name length and appending "_2", "_3", ... until
    /// unique. Resets the builder for the next name.
    /// </summary>
    internal String Build()
    {
        var original = m_name.Length > 0 ? m_name.ToString() : "job";
        var attempt = 1;
        var suffix = default(String);
        while (true)
        {
            if (attempt == 1)
            {
                suffix = String.Empty;
            }
            else if (attempt < 1000)
            {
                suffix = String.Format(CultureInfo.InvariantCulture, "_{0}", attempt);
            }
            else
            {
                throw new InvalidOperationException("Unable to create a unique name");
            }

            var candidate = original.Substring(0, Math.Min(original.Length, PipelineConstants.MaxNodeNameLength - suffix.Length)) + suffix;
            if (m_distinctNames.Add(candidate))
            {
                m_name.Clear();
                return candidate;
            }

            attempt++;
        }
    }

    /// <summary>
    /// Validates a user-supplied identifier and records it so generated names
    /// will not collide with it. Returns false with an error message when the
    /// identifier is invalid, too long, or already used in this scope.
    /// </summary>
    internal Boolean TryAddKnownName(
        String value,
        out String error)
    {
        // Fix: the identifier is invalid when it fails validation OR exceeds the
        // max length. The previous check (!valid && length < max) accepted valid
        // over-length names as well as invalid names at or above the max length.
        if (!NameValidation.IsValid(value, allowHyphens: true) || value.Length > PipelineConstants.MaxNodeNameLength)
        {
            error = $"The identifier '{value}' is invalid. IDs may only contain alphanumeric characters, '_', and '-'. IDs must start with a letter or '_' and must not exceed {PipelineConstants.MaxNodeNameLength} characters.";
            return false;
        }
        else if (!m_distinctNames.Add(value))
        {
            error = $"The identifier '{value}' may not be used more than once within the same scope.";
            return false;
        }
        else
        {
            error = null;
            return true;
        }
    }

    private const String c_separator = "_";
    private readonly HashSet<String> m_distinctNames = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
    private readonly StringBuilder m_name = new StringBuilder();
}
}

View File

@@ -0,0 +1,37 @@
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Extension methods for converting a task result into pipeline context data.
/// </summary>
public static class TaskResultExtensions
{
    /// <summary>
    /// Maps the task result to the corresponding status string context data
    /// ("success", "failure", "cancelled", or "skipped"), or null when the
    /// result has no mapping.
    /// </summary>
    public static PipelineContextData ToContextData(this TaskResult result)
    {
        String status;
        switch (result)
        {
            case TaskResult.Succeeded:
            case TaskResult.SucceededWithIssues:
                status = PipelineTemplateConstants.Success;
                break;
            case TaskResult.Failed:
            case TaskResult.Abandoned:
                status = PipelineTemplateConstants.Failure;
                break;
            case TaskResult.Canceled:
                status = PipelineTemplateConstants.Cancelled;
                break;
            case TaskResult.Skipped:
                status = PipelineTemplateConstants.Skipped;
                break;
            default:
                return null;
        }
        return new StringContextData(status);
    }

    /// <summary>
    /// Nullable overload; returns null when no result is present.
    /// </summary>
    public static PipelineContextData ToContextData(this TaskResult? result)
    {
        return result.HasValue ? result.Value.ToContextData() : null;
    }
}
}

View File

@@ -0,0 +1,197 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.IO;
using System.Threading;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.ObjectTemplating.Schema;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
using GitHub.DistributedTask.ObjectTemplating;
/// <summary>
/// Represents a step "template" reference parsed from a job's steps, and builds
/// the scope object (name, merged inputs, outputs) used when resolving it.
/// </summary>
internal sealed class TemplateReference
{
    private TemplateReference(
        String scope,
        String id,
        String generatedId,
        StringToken templatePath,
        MappingToken inputs)
    {
        Scope = scope;
        TemplatePath = templatePath;
        Inputs = inputs;
        if (!String.IsNullOrEmpty(generatedId))
        {
            Id = generatedId;
            m_isGeneratedId = true;
        }
        else
        {
            Id = id;
        }
    }

    internal String Id { get; }

    internal MappingToken Inputs { get; }

    internal String Scope { get; }

    internal StringToken TemplatePath { get; }

    // Fully qualified scope name: "<parent-scope>.<id>" or just "<id>"
    internal String TemplateScope
    {
        get
        {
            return !String.IsNullOrEmpty(Scope) ? $"{Scope}.{Id}" : Id;
        }
    }

    /// <summary>
    /// Builds the scope mapping for the referenced template: the scope name,
    /// the reference inputs merged over the template's input defaults, and the
    /// template's outputs (omitted when the reference id was generated).
    /// Unknown input names are recorded as errors on the context.
    /// </summary>
    internal MappingToken CreateScope(
        TemplateContext context,
        TemplateToken template)
    {
        var mapping = template.AssertMapping("template file");

        // Get the inputs and outputs from the template
        var inputs = default(MappingToken);
        var outputs = default(MappingToken);
        foreach (var pair in mapping)
        {
            var propertyName = pair.Key.AssertString("template file property name");
            switch (propertyName.Value)
            {
                case PipelineTemplateConstants.Inputs:
                    inputs = pair.Value.AssertMapping("template file inputs");
                    break;
                case PipelineTemplateConstants.Outputs:
                    if (!m_isGeneratedId)
                    {
                        outputs = pair.Value.AssertMapping("template file outputs");
                    }
                    break;
            }
        }

        // Determine allowed input names
        var allowedInputNames = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
        if (inputs?.Count > 0)
        {
            foreach (var pair in inputs)
            {
                var inputPropertyName = pair.Key.AssertString("template file inputs property");
                allowedInputNames.Add(inputPropertyName.Value);
            }
        }

        // Validate override inputs names
        var overrideInputs = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
        var mergedInputs = new MappingToken(null, null, null);
        if (Inputs?.Count > 0)
        {
            foreach (var pair in Inputs)
            {
                var inputPropertyName = pair.Key.AssertString("template reference inputs property");
                if (!allowedInputNames.Contains(inputPropertyName.Value))
                {
                    context.Error(inputPropertyName, $"Input '{inputPropertyName.Value}' is not allowed");
                    continue;
                }

                overrideInputs.Add(inputPropertyName.Value);
                mergedInputs.Add(pair.Key, pair.Value);
            }
        }

        // Merge defaults for inputs not overridden by the reference
        if (inputs?.Count > 0)
        {
            foreach (var pair in inputs)
            {
                var inputPropertyName = pair.Key.AssertString("template file inputs property");
                if (!overrideInputs.Contains(inputPropertyName.Value))
                {
                    mergedInputs.Add(pair.Key, pair.Value);
                }
            }
        }

        // Build the scope object
        var result = new MappingToken(null, null, null);
        var namePropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Name);
        var namePropertyValue = new StringToken(null, null, null, TemplateScope);
        result.Add(namePropertyName, namePropertyValue);
        if (mergedInputs.Count > 0)
        {
            var inputsPropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Inputs);
            result.Add(inputsPropertyName, mergedInputs);
        }
        if (outputs?.Count > 0)
        {
            var outputsPropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Outputs);
            result.Add(outputsPropertyName, outputs);
        }
        return result;
    }

    /// <summary>
    /// Attempts to interpret a step mapping as a template reference.
    /// Returns true only when a "template" property is present.
    /// </summary>
    internal static Boolean TryCreate(
        MappingToken mapping,
        out TemplateReference reference)
    {
        var scope = default(String);
        var id = default(String);
        var generatedId = default(String);
        var templatePath = default(StringToken);
        var inputs = default(MappingToken);
        foreach (var property in mapping)
        {
            var propertyName = property.Key.AssertString("candidate template reference property name");
            switch (propertyName.Value)
            {
                case PipelineTemplateConstants.Scope:
                    var scopeStringToken = property.Value.AssertString("step scope");
                    scope = scopeStringToken.Value;
                    break;

                case PipelineTemplateConstants.Id:
                    var idStringToken = property.Value.AssertString("step id");
                    id = idStringToken.Value;
                    break;

                case PipelineTemplateConstants.GeneratedId:
                    var generatedIdStringToken = property.Value.AssertString("step generated id");
                    generatedId = generatedIdStringToken.Value;
                    break;

                case PipelineTemplateConstants.Template:
                    templatePath = property.Value.AssertString("step template reference");
                    break;

                case PipelineTemplateConstants.Inputs:
                    inputs = property.Value.AssertMapping("step template reference inputs");
                    break;
            }
        }

        if (templatePath != null)
        {
            reference = new TemplateReference(scope, id, generatedId, templatePath, inputs);
            return true;
        }
        else
        {
            reference = null;
            return false;
        }
    }

    // Fix: only assigned in the constructor; declared readonly
    private readonly Boolean m_isGeneratedId;
}
}

View File

@@ -0,0 +1,572 @@
using System;
using System.Globalization;
using System.IO;
using System.Linq;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using YamlDotNet.Core;
using YamlDotNet.Core.Events;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Converts a YAML file into a TemplateToken
/// </summary>
internal sealed class YamlObjectReader : IObjectReader
{
    internal YamlObjectReader(
        Int32? fileId,
        TextReader input)
    {
        m_fileId = fileId;
        m_parser = new Parser(input);
    }

    /// <summary>
    /// Consumes the current parse event when it is a scalar, converting it to the
    /// appropriate literal token (string, boolean, number, or null).
    /// </summary>
    public Boolean AllowLiteral(out LiteralToken value)
    {
        if (EvaluateCurrent() is Scalar scalar)
        {
            // Tag specified
            if (!String.IsNullOrEmpty(scalar.Tag))
            {
                // String tag
                if (String.Equals(scalar.Tag, c_stringTag, StringComparison.Ordinal))
                {
                    value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
                    MoveNext();
                    return true;
                }

                // Not plain style
                if (scalar.Style != ScalarStyle.Plain)
                {
                    throw new NotSupportedException($"The scalar style '{scalar.Style}' on line {scalar.Start.Line} and column {scalar.Start.Column} is not valid with the tag '{scalar.Tag}'");
                }

                // Boolean, Float, Integer, or Null
                switch (scalar.Tag)
                {
                    case c_booleanTag:
                        value = ParseBoolean(scalar);
                        break;
                    case c_floatTag:
                        value = ParseFloat(scalar);
                        break;
                    case c_integerTag:
                        value = ParseInteger(scalar);
                        break;
                    case c_nullTag:
                        value = ParseNull(scalar);
                        break;
                    default:
                        throw new NotSupportedException($"Unexpected tag '{scalar.Tag}'");
                }

                MoveNext();
                return true;
            }

            // Plain style, determine type using YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
            if (scalar.Style == ScalarStyle.Plain)
            {
                if (MatchNull(scalar, out var nullToken))
                {
                    value = nullToken;
                }
                else if (MatchBoolean(scalar, out var booleanToken))
                {
                    value = booleanToken;
                }
                else if (MatchInteger(scalar, out var numberToken) ||
                    MatchFloat(scalar, out numberToken))
                {
                    value = numberToken;
                }
                else
                {
                    value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
                }

                MoveNext();
                return true;
            }

            // Otherwise assume string
            value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
            MoveNext();
            return true;
        }

        value = default;
        return false;
    }

    public Boolean AllowSequenceStart(out SequenceToken value)
    {
        if (EvaluateCurrent() is SequenceStart sequenceStart)
        {
            value = new SequenceToken(m_fileId, sequenceStart.Start.Line, sequenceStart.Start.Column);
            MoveNext();
            return true;
        }

        value = default;
        return false;
    }

    public Boolean AllowSequenceEnd()
    {
        if (EvaluateCurrent() is SequenceEnd)
        {
            MoveNext();
            return true;
        }

        return false;
    }

    public Boolean AllowMappingStart(out MappingToken value)
    {
        if (EvaluateCurrent() is MappingStart mappingStart)
        {
            value = new MappingToken(m_fileId, mappingStart.Start.Line, mappingStart.Start.Column);
            MoveNext();
            return true;
        }

        value = default;
        return false;
    }

    public Boolean AllowMappingEnd()
    {
        if (EvaluateCurrent() is MappingEnd)
        {
            MoveNext();
            return true;
        }

        return false;
    }

    /// <summary>
    /// Consumes the last parsing events, which are expected to be DocumentEnd and StreamEnd.
    /// </summary>
    public void ValidateEnd()
    {
        if (EvaluateCurrent() is DocumentEnd)
        {
            MoveNext();
        }
        else
        {
            throw new InvalidOperationException("Expected document end parse event");
        }

        if (EvaluateCurrent() is StreamEnd)
        {
            MoveNext();
        }
        else
        {
            throw new InvalidOperationException("Expected stream end parse event");
        }

        if (MoveNext())
        {
            throw new InvalidOperationException("Expected end of parse events");
        }
    }

    /// <summary>
    /// Consumes the first parsing events, which are expected to be StreamStart and DocumentStart.
    /// </summary>
    public void ValidateStart()
    {
        if (EvaluateCurrent() != null)
        {
            throw new InvalidOperationException("Unexpected parser state");
        }

        if (!MoveNext())
        {
            throw new InvalidOperationException("Expected a parse event");
        }

        if (EvaluateCurrent() is StreamStart)
        {
            MoveNext();
        }
        else
        {
            throw new InvalidOperationException("Expected stream start parse event");
        }

        if (EvaluateCurrent() is DocumentStart)
        {
            MoveNext();
        }
        else
        {
            throw new InvalidOperationException("Expected document start parse event");
        }
    }

    // Returns the current parse event (cached), rejecting anchors and any
    // unsupported event type.
    private ParsingEvent EvaluateCurrent()
    {
        if (m_current == null)
        {
            m_current = m_parser.Current;
            if (m_current != null)
            {
                if (m_current is Scalar scalar)
                {
                    // Verify not using anchors
                    if (scalar.Anchor != null)
                    {
                        throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{scalar.Anchor}'");
                    }
                }
                else if (m_current is MappingStart mappingStart)
                {
                    // Verify not using anchors
                    if (mappingStart.Anchor != null)
                    {
                        throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{mappingStart.Anchor}'");
                    }
                }
                else if (m_current is SequenceStart sequenceStart)
                {
                    // Verify not using anchors
                    if (sequenceStart.Anchor != null)
                    {
                        throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{sequenceStart.Anchor}'");
                    }
                }
                else if (!(m_current is MappingEnd) &&
                    !(m_current is SequenceEnd) &&
                    !(m_current is DocumentStart) &&
                    !(m_current is DocumentEnd) &&
                    !(m_current is StreamStart) &&
                    !(m_current is StreamEnd))
                {
                    throw new InvalidOperationException($"Unexpected parsing event type: {m_current.GetType().Name}");
                }
            }
        }

        return m_current;
    }

    // Advances to the next parse event, clearing the cache.
    private Boolean MoveNext()
    {
        m_current = null;
        return m_parser.MoveNext();
    }

    private BooleanToken ParseBoolean(Scalar scalar)
    {
        if (MatchBoolean(scalar, out var token))
        {
            return token;
        }

        ThrowInvalidValue(scalar, c_booleanTag); // throws
        return default;
    }

    private NumberToken ParseFloat(Scalar scalar)
    {
        if (MatchFloat(scalar, out var token))
        {
            return token;
        }

        ThrowInvalidValue(scalar, c_floatTag); // throws
        return default;
    }

    private NumberToken ParseInteger(Scalar scalar)
    {
        if (MatchInteger(scalar, out var token))
        {
            return token;
        }

        ThrowInvalidValue(scalar, c_integerTag); // throws
        return default;
    }

    private NullToken ParseNull(Scalar scalar)
    {
        if (MatchNull(scalar, out var token))
        {
            return token;
        }

        ThrowInvalidValue(scalar, c_nullTag); // throws
        return default;
    }

    private Boolean MatchBoolean(
        Scalar scalar,
        out BooleanToken value)
    {
        // YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
        switch (scalar.Value ?? String.Empty)
        {
            case "true":
            case "True":
            case "TRUE":
                value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, true);
                return true;
            case "false":
            case "False":
            case "FALSE":
                value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, false);
                return true;
        }

        value = default;
        return false;
    }

    private Boolean MatchFloat(
        Scalar scalar,
        out NumberToken value)
    {
        // YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
        var str = scalar.Value;
        if (!String.IsNullOrEmpty(str))
        {
            // Check for [-+]?(\.inf|\.Inf|\.INF)|\.nan|\.NaN|\.NAN
            switch (str)
            {
                case ".inf":
                case ".Inf":
                case ".INF":
                case "+.inf":
                case "+.Inf":
                case "+.INF":
                    value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.PositiveInfinity);
                    return true;
                case "-.inf":
                case "-.Inf":
                case "-.INF":
                    value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NegativeInfinity);
                    return true;
                case ".nan":
                case ".NaN":
                case ".NAN":
                    value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NaN);
                    return true;
            }

            // Otherwise check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?

            // Skip leading sign
            var index = str[0] == '-' || str[0] == '+' ? 1 : 0;

            // Check for integer portion
            var length = str.Length;
            var hasInteger = false;
            while (index < length && str[index] >= '0' && str[index] <= '9')
            {
                hasInteger = true;
                index++;
            }

            // Check for decimal point
            var hasDot = false;
            if (index < length && str[index] == '.')
            {
                hasDot = true;
                index++;
            }

            // Check for decimal portion
            var hasDecimal = false;
            while (index < length && str[index] >= '0' && str[index] <= '9')
            {
                hasDecimal = true;
                index++;
            }

            // Check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)
            if ((hasDot && hasDecimal) || hasInteger)
            {
                // Check for end
                if (index == length)
                {
                    // Try parse
                    if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var doubleValue))
                    {
                        value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
                        return true;
                    }
                    // Otherwise exceeds range
                    else
                    {
                        ThrowInvalidValue(scalar, c_floatTag); // throws
                    }
                }
                // Check [eE][-+]?[0-9]
                else if (index < length && (str[index] == 'e' || str[index] == 'E'))
                {
                    index++;

                    // Skip sign
                    if (index < length && (str[index] == '-' || str[index] == '+'))
                    {
                        index++;
                    }

                    // Check for exponent
                    var hasExponent = false;
                    while (index < length && str[index] >= '0' && str[index] <= '9')
                    {
                        hasExponent = true;
                        index++;
                    }

                    // Check for end
                    if (hasExponent && index == length)
                    {
                        // Try parse
                        // Fix: removed a redundant (Double) cast on the parsed value
                        if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out var doubleValue))
                        {
                            value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
                            return true;
                        }
                        // Otherwise exceeds range
                        else
                        {
                            ThrowInvalidValue(scalar, c_floatTag); // throws
                        }
                    }
                }
            }
        }

        value = default;
        return false;
    }

    private Boolean MatchInteger(
        Scalar scalar,
        out NumberToken value)
    {
        // YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
        var str = scalar.Value;
        if (!String.IsNullOrEmpty(str))
        {
            // Check for [0-9]+
            var firstChar = str[0];
            if (firstChar >= '0' && firstChar <= '9' &&
                str.Skip(1).All(x => x >= '0' && x <= '9'))
            {
                // Try parse
                if (Double.TryParse(str, NumberStyles.None, CultureInfo.InvariantCulture, out var doubleValue))
                {
                    value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
                    return true;
                }

                // Otherwise exceeds range
                ThrowInvalidValue(scalar, c_integerTag); // throws
            }
            // Check for (-|+)[0-9]+
            else if ((firstChar == '-' || firstChar == '+') &&
                str.Length > 1 &&
                str.Skip(1).All(x => x >= '0' && x <= '9'))
            {
                // Try parse
                if (Double.TryParse(str, NumberStyles.AllowLeadingSign, CultureInfo.InvariantCulture, out var doubleValue))
                {
                    value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
                    return true;
                }

                // Otherwise exceeds range
                ThrowInvalidValue(scalar, c_integerTag); // throws
            }
            // Check for 0x[0-9a-fA-F]+
            else if (firstChar == '0' &&
                str.Length > 2 &&
                str[1] == 'x' &&
                str.Skip(2).All(x => (x >= '0' && x <= '9') || (x >= 'a' && x <= 'f') || (x >= 'A' && x <= 'F')))
            {
                // Try parse
                if (Int32.TryParse(str.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out var integerValue))
                {
                    value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
                    return true;
                }

                // Otherwise exceeds range
                ThrowInvalidValue(scalar, c_integerTag); // throws
            }
            // Check for 0o[0-9]+
            else if (firstChar == '0' &&
                str.Length > 2 &&
                str[1] == 'o' &&
                str.Skip(2).All(x => x >= '0' && x <= '7'))
            {
                // Try parse
                var integerValue = default(Int32);
                try
                {
                    integerValue = Convert.ToInt32(str.Substring(2), 8);
                }
                // Otherwise exceeds range
                catch (Exception)
                {
                    ThrowInvalidValue(scalar, c_integerTag); // throws
                }

                value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
                return true;
            }
        }

        value = default;
        return false;
    }

    private Boolean MatchNull(
        Scalar scalar,
        out NullToken value)
    {
        // YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
        switch (scalar.Value ?? String.Empty)
        {
            case "":
            case "null":
            case "Null":
            case "NULL":
            case "~":
                value = new NullToken(m_fileId, scalar.Start.Line, scalar.Start.Column);
                return true;
        }

        value = default;
        return false;
    }

    private void ThrowInvalidValue(
        Scalar scalar,
        String tag)
    {
        // Fix: use the expected-tag parameter rather than scalar.Tag, which is null
        // for plain scalars reached via MatchFloat/MatchInteger and produced
        // messages like "... invalid for the type ''"
        throw new NotSupportedException($"The value '{scalar.Value}' on line {scalar.Start.Line} and column {scalar.Start.Column} is invalid for the type '{tag}'");
    }

    private const String c_booleanTag = "tag:yaml.org,2002:bool";
    private const String c_floatTag = "tag:yaml.org,2002:float";
    private const String c_integerTag = "tag:yaml.org,2002:int";
    private const String c_nullTag = "tag:yaml.org,2002:null";
    // Fix: the YAML 1.2 string tag is "str", not "string"; the previous value
    // never matched an explicit !!str tag, which then failed as "Unexpected tag"
    private const String c_stringTag = "tag:yaml.org,2002:str";
    private readonly Int32? m_fileId;
    private readonly Parser m_parser;
    private ParsingEvent m_current;
}
}

View File

@@ -0,0 +1,73 @@
using System;
using System.Globalization;
using System.IO;
using GitHub.DistributedTask.ObjectTemplating;
using YamlDotNet.Core.Events;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Converts a TemplateToken into YAML
/// </summary>
internal sealed class YamlObjectWriter : IObjectWriter
{
    internal YamlObjectWriter(StringWriter writer)
    {
        m_emitter = new YamlDotNet.Core.Emitter(writer);
    }

    // A null string is emitted as an empty scalar
    public void WriteString(String value) => m_emitter.Emit(new Scalar(value ?? String.Empty));

    public void WriteBoolean(Boolean value) => m_emitter.Emit(new Scalar(value ? "true" : "false"));

    // "G15" with the invariant culture: 15 significant digits
    public void WriteNumber(Double value) => m_emitter.Emit(new Scalar(value.ToString("G15", CultureInfo.InvariantCulture)));

    public void WriteNull() => m_emitter.Emit(new Scalar("null"));

    public void WriteSequenceStart() => m_emitter.Emit(new SequenceStart(null, null, true, SequenceStyle.Block));

    public void WriteSequenceEnd() => m_emitter.Emit(new SequenceEnd());

    public void WriteMappingStart() => m_emitter.Emit(new MappingStart());

    public void WriteMappingEnd() => m_emitter.Emit(new MappingEnd());

    // Begins the YAML stream and document
    public void WriteStart()
    {
        m_emitter.Emit(new StreamStart());
        m_emitter.Emit(new DocumentStart());
    }

    // Ends the document and the YAML stream
    public void WriteEnd()
    {
        m_emitter.Emit(new DocumentEnd(isImplicit: true));
        m_emitter.Emit(new StreamEnd());
    }

    private readonly YamlDotNet.Core.IEmitter m_emitter;
}
}

View File

@@ -0,0 +1,251 @@
using System;
using System.Collections.Generic;
using System.IO;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Loads a YAML file, and returns the parsed TemplateToken
/// </summary>
internal sealed class YamlTemplateLoader
{
/// <summary>
/// Initializes a new loader.
/// </summary>
/// <param name="parseOptions">Load limits (max files, max file size); defensively copied</param>
/// <param name="fileProvider">Resolves paths and supplies file content; must not be null</param>
/// <exception cref="ArgumentNullException">When <paramref name="fileProvider"/> is null</exception>
public YamlTemplateLoader(
ParseOptions parseOptions,
IFileProvider fileProvider)
{
// NOTE(review): parseOptions is not null-checked here, unlike fileProvider —
// presumably the ParseOptions copy constructor handles null; confirm.
m_parseOptions = new ParseOptions(parseOptions);
m_fileProvider = fileProvider ?? throw new ArgumentNullException(nameof(fileProvider));
}
/// <summary>
/// Loads one YAML file, parses it against the schema named by
/// <paramref name="templateType"/>, validates the result, and returns the token.
/// Parse/validation problems are recorded on <paramref name="context"/>.Errors.
/// </summary>
/// <param name="context">Supplies file IDs, the error list, trace writer, cancellation token, and memory accounting</param>
/// <param name="rootFileId">File ID of the root file (not used within this method's body)</param>
/// <param name="scope">When non-empty, added to each step as a scope property during validation</param>
/// <param name="path">File to load; rooted via the file provider before use</param>
/// <param name="templateType">Either the workflow root or the steps-template root schema</param>
/// <returns>The parsed token; callers must check <paramref name="context"/>.Errors</returns>
/// <exception cref="InvalidOperationException">When errors pre-exist on the context, or the max-files or max-file-size limit is exceeded</exception>
public TemplateToken LoadFile(
TemplateContext context,
Int32? rootFileId,
String scope,
String path,
String templateType)
{
if (context.Errors.Count > 0)
{
throw new InvalidOperationException("Expected error count to be 0 when attempting to load a new file");
}
// Is entry file? (first call on this loader instance)
var isEntryFile = m_referencedFiles.Count == 0;
// Root the path
path = m_fileProvider.ResolvePath(null, path);
// Validate max files. m_referencedFiles is a case-insensitive set, so
// re-referencing the same file does not count twice; MaxFiles <= 0 disables the check.
m_referencedFiles.Add(path);
if (m_parseOptions.MaxFiles > 0 && m_referencedFiles.Count > m_parseOptions.MaxFiles)
{
throw new InvalidOperationException($"The maximum file count of {m_parseOptions.MaxFiles} has been exceeded");
}
// Get the file ID
var fileId = context.GetFileId(path);
// Check the cache (content is cached per resolved path, case-insensitively)
if (!m_cache.TryGetValue(path, out String fileContent))
{
// Fetch the file
context.CancellationToken.ThrowIfCancellationRequested();
fileContent = m_fileProvider.GetFileContent(path);
// Validate max file size (measured in characters, not bytes)
if (fileContent.Length > m_parseOptions.MaxFileSize)
{
throw new InvalidOperationException($"The maximum file size of {m_parseOptions.MaxFileSize} characters has been exceeded");
}
// Cache
m_cache[path] = fileContent;
}
// Deserialize
var token = default(TemplateToken);
using (var stringReader = new StringReader(fileContent))
{
var yamlObjectReader = new YamlObjectReader(fileId, stringReader);
token = TemplateReader.Read(context, templateType, yamlObjectReader, fileId, out _);
}
// Trace a banner identifying the file (blank separator line between files)
if (!isEntryFile)
{
context.TraceWriter.Info(String.Empty);
}
context.TraceWriter.Info("# ");
context.TraceWriter.Info("# {0}", path);
context.TraceWriter.Info("# ");
// Validate ref names — only when parsing produced no errors
if (context.Errors.Count == 0)
{
switch (templateType)
{
case PipelineTemplateConstants.WorkflowRoot:
ValidateWorkflow(context, scope, token);
break;
case PipelineTemplateConstants.StepsTemplateRoot:
// A steps template is a mapping; only its "steps" property is validated here
var stepsTemplate = token.AssertMapping("steps template");
foreach (var stepsTemplateProperty in stepsTemplate)
{
var stepsTemplatePropertyName = stepsTemplateProperty.Key.AssertString("steps template property name");
switch (stepsTemplatePropertyName.Value)
{
case PipelineTemplateConstants.Steps:
ValidateSteps(context, scope, stepsTemplateProperty.Value);
break;
}
}
break;
default:
throw new NotImplementedException($"Unexpected template type '{templateType}' when loading yaml file");
}
}
return token;
}
/// <summary>
/// Validates a workflow root token: for each job found under the "jobs"
/// (or "workflow") property, validates the job's "steps" sequence.
/// Properties other than the job collection are ignored here.
/// </summary>
private void ValidateWorkflow(
TemplateContext context,
String scope,
TemplateToken token)
{
var workflow = token.AssertMapping("workflow");
foreach (var workflowProperty in workflow)
{
var workflowPropertyName = workflowProperty.Key.AssertString("workflow property name");
switch (workflowPropertyName.Value)
{
// Both keys hold the job collection
case PipelineTemplateConstants.Jobs:
case PipelineTemplateConstants.Workflow:
var jobs = workflowProperty.Value.AssertMapping("workflow property value");
foreach (var jobsProperty in jobs)
{
var job = jobsProperty.Value.AssertMapping("jobs property value");
foreach (var jobProperty in job)
{
var jobPropertyName = jobProperty.Key.AssertString("job property name");
switch (jobPropertyName.Value)
{
case PipelineTemplateConstants.Steps:
ValidateSteps(context, scope, jobProperty.Value);
break;
}
}
}
break;
}
}
}
/// <summary>
/// Validates a "steps" sequence. Specifically:
/// - checks explicit step IDs are valid and unique (errors recorded on the context),
/// - stamps the originating <paramref name="scope"/> onto every step when non-empty,
/// - for template-reference steps lacking an explicit ID, appends a generated ID
///   derived from the template path.
/// Mutates the step mappings in place and accounts the added tokens
/// against the context's memory tracker.
/// </summary>
private void ValidateSteps(
TemplateContext context,
String scope,
TemplateToken token)
{
var nameBuilder = new ReferenceNameBuilder();
var steps = token.AssertSequence("steps");
var needsReferenceName = new List<MappingToken>();
foreach (var stepsItem in steps)
{
var step = stepsItem.AssertMapping("steps item");
var isTemplateReference = false;
var hasReferenceName = false;
foreach (var stepProperty in step)
{
var stepPropertyKey = stepProperty.Key.AssertString("step property name");
switch (stepPropertyKey.Value)
{
// Validate reference names
case PipelineTemplateConstants.Id:
var referenceNameLiteral = stepProperty.Value.AssertString("step ID");
var referenceName = referenceNameLiteral.Value;
if (String.IsNullOrEmpty(referenceName))
{
continue;
}
if (!nameBuilder.TryAddKnownName(referenceName, out var error))
{
context.Error(referenceNameLiteral, error);
}
hasReferenceName = true;
break;
case PipelineTemplateConstants.Template:
isTemplateReference = true;
break;
}
}
// No reference name — queue for ID generation below
if (isTemplateReference && !hasReferenceName)
{
needsReferenceName.Add(step);
}
// Stamp the scope
if (!String.IsNullOrEmpty(scope))
{
var scopePropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Scope);
var scopePropertyValue = new StringToken(null, null, null, scope);
step.Add(scopePropertyName, scopePropertyValue);
// Account for the added tokens against the context's memory limits
context.Memory.AddBytes(scopePropertyName);
context.Memory.AddBytes(scopePropertyValue);
}
}
// Generate reference names. Skipped when any error was recorded above —
// presumably to avoid generating names on top of invalid input; confirm.
if (needsReferenceName.Count > 0 && context.Errors.Count == 0)
{
foreach (var step in needsReferenceName)
{
// Get the template path
var templatePath = default(String);
foreach (var stepProperty in step)
{
var stepPropertyKey = stepProperty.Key.AssertString("step property name");
switch (stepPropertyKey.Value)
{
case PipelineTemplateConstants.Template:
var templateStringToken = stepProperty.Value.AssertString("step template path");
templatePath = templateStringToken.Value;
break;
}
}
// Generate reference name from the template path
if (!String.IsNullOrEmpty(templatePath))
{
nameBuilder.AppendSegment(templatePath);
var generatedIdPropertyName = new StringToken(null, null, null, PipelineTemplateConstants.GeneratedId);
var generatedIdPropertyValue = new StringToken(null, null, null, nameBuilder.Build());
step.Add(generatedIdPropertyName, generatedIdPropertyValue);
context.Memory.AddBytes(generatedIdPropertyName);
context.Memory.AddBytes(generatedIdPropertyValue);
}
}
}
}
/// <summary>
/// Cache of file content
/// </summary>
private readonly Dictionary<String, String> m_cache = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
private readonly IFileProvider m_fileProvider;
private readonly ParseOptions m_parseOptions;
/// <summary>
/// Tracks unique file references
/// </summary>
private readonly HashSet<String> m_referencedFiles = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
}
}