Compare updated template evaluator (#4092)

This commit is contained in:
eric sciple
2025-11-07 14:18:52 -06:00
committed by GitHub
parent 53d69ff441
commit b5b7986cd6
188 changed files with 27222 additions and 4 deletions

View File

@@ -0,0 +1,85 @@
#nullable enable
using System.Runtime.Serialization;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser
{
[DataContract]
public sealed class ActionStep : IStep
{
[DataMember(Order = 0, Name = "id", EmitDefaultValue = false)]
public string? Id
{
get;
set;
}
/// <summary>
/// Gets or sets the display name
/// </summary>
[DataMember(Order = 1, Name = "name", EmitDefaultValue = false)]
public ScalarToken? Name
{
get;
set;
}
[DataMember(Order = 2, Name = "if", EmitDefaultValue = false)]
public BasicExpressionToken? If
{
get;
set;
}
[DataMember(Order = 3, Name = "continue-on-error", EmitDefaultValue = false)]
public ScalarToken? ContinueOnError
{
get;
set;
}
[DataMember(Order = 4, Name = "timeout-minutes", EmitDefaultValue = false)]
public ScalarToken? TimeoutMinutes
{
get;
set;
}
[DataMember(Order = 5, Name = "env", EmitDefaultValue = false)]
public TemplateToken? Env
{
get;
set;
}
[DataMember(Order = 6, Name = "uses", EmitDefaultValue = false)]
public StringToken? Uses
{
get;
set;
}
[DataMember(Order = 7, Name = "with", EmitDefaultValue = false)]
public TemplateToken? With
{
get;
set;
}
public IStep Clone(bool omitSource)
{
return new ActionStep
{
ContinueOnError = ContinueOnError?.Clone(omitSource) as ScalarToken,
Env = Env?.Clone(omitSource),
Id = Id,
If = If?.Clone(omitSource) as BasicExpressionToken,
Name = Name?.Clone(omitSource) as ScalarToken,
TimeoutMinutes = TimeoutMinutes?.Clone(omitSource) as ScalarToken,
Uses = Uses?.Clone(omitSource) as StringToken,
With = With?.Clone(omitSource),
};
}
}
}
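
Example (illustrative sketch, not part of this diff): constructing a step and cloning it. Token constructors take (fileId, line, column, value) as used elsewhere in this change; the null position arguments are placeholders.
var step = new ActionStep
{
    Id = "checkout",
    Name = new StringToken(null, null, null, "Check out sources"),
    Uses = new StringToken(null, null, null, "actions/checkout@v4"),
};
// Clone returns a new ActionStep whose token properties are themselves cloned;
// omitSource: true drops file/line/column information from the copied tokens.
var copy = (ActionStep)step.Clone(omitSource: true);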

View File

@@ -0,0 +1,25 @@
#nullable enable
using System.Runtime.Serialization;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser
{
/// <summary>
/// Information about an environment parsed from YAML. The name is evaluated during parsing; the URL will be evaluated on the runner
/// </summary>
[DataContract]
public sealed class ActionsEnvironmentReference
{
public ActionsEnvironmentReference(string name)
{
Name = name;
}
[DataMember]
public string Name { get; set; }
[DataMember]
public TemplateToken? Url { get; set; }
}
}

View File

@@ -0,0 +1,22 @@
using System.Collections.Generic;
namespace GitHub.Actions.WorkflowParser
{
internal static class CollectionsExtensions
{
/// <summary>
/// Adds all of the given values to this collection.
/// Can be used with dictionaries, which implement <see cref="ICollection{T}"/> and <see cref="IEnumerable{T}"/> where T is <see cref="KeyValuePair{TKey, TValue}"/>.
/// </summary>
public static TCollection AddRange<T, TCollection>(this TCollection collection, IEnumerable<T> values)
where TCollection : ICollection<T>
{
foreach (var value in values)
{
collection.Add(value);
}
return collection;
}
}
}
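
Example (illustrative sketch, not part of this diff): because Dictionary&lt;TKey, TValue&gt; implements ICollection&lt;KeyValuePair&lt;TKey, TValue&gt;&gt;, AddRange can merge the pairs of one dictionary into another; duplicate keys would throw, as with Dictionary.Add.
var defaults = new Dictionary<string, string> { ["os"] = "linux" };
var merged = new Dictionary<string, string>()
    .AddRange(defaults)
    .AddRange(new[] { new KeyValuePair<string, string>("arch", "x64") });
// merged now contains { os: linux, arch: x64 }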

View File

@@ -0,0 +1,14 @@
using System;
namespace GitHub.Actions.WorkflowParser.Conversion
{
internal sealed class EmptyServerTraceWriter : IServerTraceWriter
{
public void TraceAlways(
Int32 tracepoint,
String format,
params Object[] arguments)
{
}
}
}

View File

@@ -0,0 +1,183 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Text;
namespace GitHub.Actions.WorkflowParser.Conversion
{
/// <summary>
/// Builder for job and step IDs
/// </summary>
internal sealed class IdBuilder
{
internal void AppendSegment(String value)
{
if (String.IsNullOrEmpty(value))
{
return;
}
if (m_name.Length == 0)
{
var first = value[0];
if ((first >= 'a' && first <= 'z') ||
(first >= 'A' && first <= 'Z') ||
first == '_')
{
// Legal first char
}
else if ((first >= '0' && first <= '9') || first == '-')
{
// Illegal first char, but legal char.
// Prepend "_".
m_name.Append("_");
}
else
{
// Illegal char
}
}
else
{
// Separator
m_name.Append(c_separator);
}
foreach (var c in value)
{
if ((c >= 'a' && c <= 'z') ||
(c >= 'A' && c <= 'Z') ||
(c >= '0' && c <= '9') ||
c == '_' ||
c == '-')
{
// Legal
m_name.Append(c);
}
else
{
// Illegal
m_name.Append("_");
}
}
}
/// <summary>
/// Builds the ID from the segments
/// </summary>
/// <param name="allowReservedPrefix">When true, generated IDs may begin with "__" depending upon the segments
/// and collisions with known IDs. When false, generated IDs will never begin with the reserved prefix "__".</param>
/// <param name="maxLength">The maximum length of the generated ID.</param>
internal String Build(
Boolean allowReservedPrefix,
Int32 maxLength = WorkflowConstants.MaxNodeNameLength)
{
// Ensure reasonable max length
if (maxLength <= 5) // Must be long enough to accommodate at least one character + length of max suffix "_999" (refer suffix logic further below)
{
maxLength = WorkflowConstants.MaxNodeNameLength;
}
var original = m_name.Length > 0 ? m_name.ToString() : "job";
// Avoid prefix "__" when not allowed
if (!allowReservedPrefix && original.StartsWith("__", StringComparison.Ordinal))
{
original = $"_{original.TrimStart('_')}";
}
var attempt = 1;
var suffix = default(String);
while (true)
{
if (attempt == 1)
{
suffix = String.Empty;
}
else if (attempt < 1000)
{
// Special case to avoid prefix "__" when not allowed
if (!allowReservedPrefix && String.Equals(original, "_", StringComparison.Ordinal))
{
suffix = String.Format(CultureInfo.InvariantCulture, "{0}", attempt);
}
else
{
suffix = String.Format(CultureInfo.InvariantCulture, "_{0}", attempt);
}
}
else
{
throw new InvalidOperationException("Unable to create a unique name");
}
var candidate = original.Substring(0, Math.Min(original.Length, maxLength - suffix.Length)) + suffix;
if (m_distinctNames.Add(candidate))
{
m_name.Clear();
return candidate;
}
attempt++;
}
}
internal Boolean TryAddKnownId(
String value,
out String error)
{
if (String.IsNullOrEmpty(value) ||
!IsValid(value) ||
value.Length >= WorkflowConstants.MaxNodeNameLength)
{
error = $"The identifier '{value}' is invalid. IDs may only contain alphanumeric characters, '_', and '-'. IDs must start with a letter or '_' and must be less than {WorkflowConstants.MaxNodeNameLength} characters.";
return false;
}
else if (value.StartsWith("__", StringComparison.Ordinal))
{
error = $"The identifier '{value}' is invalid. IDs starting with '__' are reserved.";
return false;
}
else if (!m_distinctNames.Add(value))
{
error = $"The identifier '{value}' may not be used more than once within the same scope.";
return false;
}
else
{
error = null;
return true;
}
}
private static Boolean IsValid(String name)
{
var result = true;
for (Int32 i = 0; i < name.Length; i++)
{
if ((name[i] >= 'a' && name[i] <= 'z') ||
(name[i] >= 'A' && name[i] <= 'Z') ||
(name[i] >= '0' && name[i] <= '9' && i > 0) ||
(name[i] == '_') ||
(name[i] == '-' && i > 0))
{
continue;
}
else
{
result = false;
break;
}
}
return result;
}
private const String c_separator = "_";
private readonly HashSet<String> m_distinctNames = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
private readonly StringBuilder m_name = new StringBuilder();
}
}
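
Example (illustrative sketch, not part of this diff): segments are joined with "_", illegal characters are replaced with "_", and repeated names receive numeric suffixes.
var ids = new IdBuilder();
ids.AppendSegment("build");
ids.AppendSegment("node 18");
var first = ids.Build(allowReservedPrefix: false);   // "build_node_18"
ids.AppendSegment("build");
ids.AppendSegment("node 18");
var second = ids.Build(allowReservedPrefix: false);  // "build_node_18_2" (collision with first)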

View File

@@ -0,0 +1,44 @@
#nullable enable
using System;
using GitHub.Actions.WorkflowParser.ObjectTemplating;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.Conversion
{
internal sealed class JobCountValidator
{
public JobCountValidator(
TemplateContext context,
Int32 maxCount)
{
m_context = context ?? throw new ArgumentNullException(nameof(context));
m_maxCount = maxCount;
}
/// <summary>
/// Increments the job counter.
///
/// Appends an error to the template context only when the max job count is initially exceeded.
/// Additional calls will not append more errors.
/// </summary>
/// <param name="token">The token to use for error reporting.</param>
public void Increment(TemplateToken? token)
{
// Initial breach?
if (m_maxCount > 0 &&
m_count + 1 > m_maxCount &&
m_count <= m_maxCount)
{
m_context.Error(token, $"Workflows may not contain more than {m_maxCount} jobs across all referenced files");
}
// Increment
m_count++;
}
private readonly TemplateContext m_context;
private readonly Int32 m_maxCount;
private Int32 m_count;
}
}
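
Example (illustrative sketch, not part of this diff; assumes a TemplateContext and a TemplateToken are available from the converter): only the call that first crosses the limit appends an error.
var validator = new JobCountValidator(context, maxCount: 2);
validator.Increment(token);  // count = 1, no error
validator.Increment(token);  // count = 2, no error
validator.Increment(token);  // count = 3, appends "Workflows may not contain more than 2 jobs ..."
validator.Increment(token);  // count = 4, no additional error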

View File

@@ -0,0 +1,64 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Globalization;
namespace GitHub.Actions.WorkflowParser.Conversion
{
/// <summary>
/// Builder for job display names. Used when appending strategy configuration values to build a display name.
/// </summary>
internal sealed class JobNameBuilder
{
public JobNameBuilder(String jobName)
{
if (!String.IsNullOrEmpty(jobName))
{
m_jobName = jobName;
m_segments = new List<String>();
}
}
public void AppendSegment(String value)
{
if (String.IsNullOrEmpty(value) || m_segments == null)
{
return;
}
m_segments.Add(value);
}
public String Build()
{
if (String.IsNullOrEmpty(m_jobName))
{
return null;
}
var name = default(String);
if (m_segments.Count == 0)
{
name = m_jobName;
}
else
{
var joinedSegments = String.Join(", ", m_segments);
name = String.Format(CultureInfo.InvariantCulture, "{0} ({1})", m_jobName, joinedSegments);
}
const Int32 maxNameLength = 100;
if (name.Length > maxNameLength)
{
name = name.Substring(0, maxNameLength - 3) + "...";
}
m_segments.Clear();
return name;
}
private readonly String m_jobName;
private readonly List<String> m_segments;
}
}
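
Example (illustrative sketch, not part of this diff): strategy configuration values become a parenthesized, comma-separated suffix; names longer than 100 characters are truncated with "...".
var names = new JobNameBuilder("build");
names.AppendSegment("linux");
names.AppendSegment("node 18");
var displayName = names.Build();  // "build (linux, node 18)"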

View File

@@ -0,0 +1,236 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using GitHub.Actions.WorkflowParser.ObjectTemplating;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
using Newtonsoft.Json.Linq;
namespace GitHub.Actions.WorkflowParser.Conversion
{
internal sealed class JsonObjectReader : IObjectReader
{
internal JsonObjectReader(
Int32? fileId,
String input)
{
m_fileId = fileId;
var token = JToken.Parse(input);
m_enumerator = GetEvents(token, true).GetEnumerator();
m_enumerator.MoveNext();
}
public Boolean AllowLiteral(out LiteralToken literal)
{
var current = m_enumerator.Current;
switch (current.Type)
{
case ParseEventType.Null:
literal = new NullToken(m_fileId, current.Line, current.Column);
m_enumerator.MoveNext();
return true;
case ParseEventType.Boolean:
literal = new BooleanToken(m_fileId, current.Line, current.Column, (Boolean)current.Value);
m_enumerator.MoveNext();
return true;
case ParseEventType.Number:
literal = new NumberToken(m_fileId, current.Line, current.Column, (Double)current.Value);
m_enumerator.MoveNext();
return true;
case ParseEventType.String:
literal = new StringToken(m_fileId, current.Line, current.Column, (String)current.Value);
m_enumerator.MoveNext();
return true;
}
literal = null;
return false;
}
public Boolean AllowSequenceStart(out SequenceToken sequence)
{
var current = m_enumerator.Current;
if (current.Type == ParseEventType.SequenceStart)
{
sequence = new SequenceToken(m_fileId, current.Line, current.Column);
m_enumerator.MoveNext();
return true;
}
sequence = null;
return false;
}
public Boolean AllowSequenceEnd()
{
if (m_enumerator.Current.Type == ParseEventType.SequenceEnd)
{
m_enumerator.MoveNext();
return true;
}
return false;
}
public Boolean AllowMappingStart(out MappingToken mapping)
{
var current = m_enumerator.Current;
if (current.Type == ParseEventType.MappingStart)
{
mapping = new MappingToken(m_fileId, current.Line, current.Column);
m_enumerator.MoveNext();
return true;
}
mapping = null;
return false;
}
public Boolean AllowMappingEnd()
{
if (m_enumerator.Current.Type == ParseEventType.MappingEnd)
{
m_enumerator.MoveNext();
return true;
}
return false;
}
/// <summary>
/// Consumes the last parsing events, which are expected to be DocumentEnd and StreamEnd.
/// </summary>
public void ValidateEnd()
{
if (m_enumerator.Current.Type == ParseEventType.DocumentEnd)
{
m_enumerator.MoveNext();
return;
}
throw new InvalidOperationException("Expected end of reader");
}
/// <summary>
/// Consumes the first parsing events, which are expected to be StreamStart and DocumentStart.
/// </summary>
public void ValidateStart()
{
if (m_enumerator.Current.Type == ParseEventType.DocumentStart)
{
m_enumerator.MoveNext();
return;
}
throw new InvalidOperationException("Expected start of reader");
}
private IEnumerable<ParseEvent> GetEvents(
JToken token,
Boolean root = false)
{
if (root)
{
yield return new ParseEvent(0, 0, ParseEventType.DocumentStart);
}
var lineInfo = token as Newtonsoft.Json.IJsonLineInfo;
var line = lineInfo.LineNumber;
var column = lineInfo.LinePosition;
switch (token.Type)
{
case JTokenType.Null:
yield return new ParseEvent(line, column, ParseEventType.Null, null);
break;
case JTokenType.Boolean:
yield return new ParseEvent(line, column, ParseEventType.Boolean, token.ToObject<Boolean>());
break;
case JTokenType.Float:
case JTokenType.Integer:
yield return new ParseEvent(line, column, ParseEventType.Number, token.ToObject<Double>());
break;
case JTokenType.String:
yield return new ParseEvent(line, column, ParseEventType.String, token.ToObject<String>());
break;
case JTokenType.Array:
yield return new ParseEvent(line, column, ParseEventType.SequenceStart);
foreach (var item in (token as JArray))
{
foreach (var e in GetEvents(item))
{
yield return e;
}
}
yield return new ParseEvent(line, column, ParseEventType.SequenceEnd);
break;
case JTokenType.Object:
yield return new ParseEvent(line, column, ParseEventType.MappingStart);
foreach (var pair in (token as JObject))
{
yield return new ParseEvent(line, column, ParseEventType.String, pair.Key ?? String.Empty);
foreach (var e in GetEvents(pair.Value))
{
yield return e;
}
}
yield return new ParseEvent(line, column, ParseEventType.MappingEnd);
break;
default:
throw new NotSupportedException($"Unexpected JTokenType {token.Type}");
}
if (root)
{
yield return new ParseEvent(0, 0, ParseEventType.DocumentEnd);
}
}
private struct ParseEvent
{
public ParseEvent(
Int32 line,
Int32 column,
ParseEventType type,
Object value = null)
{
Line = line;
Column = column;
Type = type;
Value = value;
}
public readonly Int32 Line;
public readonly Int32 Column;
public readonly ParseEventType Type;
public readonly Object Value;
}
private enum ParseEventType
{
None = 0,
Null,
Boolean,
Number,
String,
SequenceStart,
SequenceEnd,
MappingStart,
MappingEnd,
DocumentStart,
DocumentEnd,
}
private IEnumerator<ParseEvent> m_enumerator;
private Int32? m_fileId;
}
}
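
Example (illustrative sketch, not part of this diff): manually walking the parse events for a small JSON document. Normally the object-templating reader drives these calls.
var reader = new JsonObjectReader(null, "{\"name\": \"build\"}");
reader.ValidateStart();                       // consumes DocumentStart
reader.AllowMappingStart(out var mapping);    // true
reader.AllowLiteral(out var key);             // StringToken "name"
reader.AllowLiteral(out var value);           // StringToken "build"
reader.AllowMappingEnd();                     // true
reader.ValidateEnd();                         // consumes DocumentEnd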

View File

@@ -0,0 +1,738 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using GitHub.Actions.Expressions;
using GitHub.Actions.Expressions.Data;
using GitHub.Actions.Expressions.Sdk;
using GitHub.Actions.WorkflowParser.ObjectTemplating;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.Conversion
{
/// <summary>
/// Used to build a matrix cross product and apply include/exclude filters.
/// </summary>
internal sealed class MatrixBuilder
{
internal MatrixBuilder(
TemplateContext context,
String jobName)
{
m_context = context;
m_jobName = jobName;
}
/// <summary>
/// Adds an input vector. <see cref="Build"/> creates a cross product from all input vectors.
///
/// For example, given the matrix:
/// arch: [x64, x86]
/// os: [linux, windows]
///
/// This method should be called twice:
/// AddVector("arch", ...);
/// AddVector("os", ...)
/// </summary>
internal void AddVector(
String name,
SequenceToken vector)
{
m_vectors.Add(name, vector.ToExpressionData());
}
/// <summary>
/// Adds the sequence containing all exclude mappings.
/// </summary>
internal void Exclude(SequenceToken exclude)
{
m_excludeSequence = exclude;
}
/// <summary>
/// Adds the sequence containing all include mappings.
/// </summary>
internal void Include(SequenceToken include)
{
m_includeSequence = include;
}
/// <summary>
/// Builds the matrix.
///
/// In addition to computing the cross product of all input vectors, this method also:
/// 1. Applies all exclude filters against each cross product vector
/// 2. Applies all include filters against each cross product vector, which may
/// add additional values into existing vectors
/// 3. Appends all unmatched include vectors, as additional result vectors
///
/// Example 1, simple cross product:
/// arch: [x64, x86]
/// os: [linux, windows]
/// The result would contain the following vectors:
/// [arch: x64, os: linux]
/// [arch: x64, os: windows]
/// [arch: x86, os: linux]
/// [arch: x86, os: windows]
///
/// Example 2, using exclude filter:
/// arch: [x64, x86]
/// os: [linux, windows]
/// exclude:
/// - arch: x86
/// os: linux
/// The result would contain the following vectors:
/// [arch: x64, os: linux]
/// [arch: x64, os: windows]
/// [arch: x86, os: windows]
///
/// Example 3, using include filter to add additional values:
/// arch: [x64, x86]
/// os: [linux, windows]
/// include:
/// - arch: x64
/// os: linux
/// publish: true
/// The result would contain the following vectors:
/// [arch: x64, os: linux, publish: true]
/// [arch: x64, os: windows]
/// [arch: x86, os: linux]
/// [arch: x86, os: windows]
///
/// Example 4, include additional vectors:
/// arch: [x64, x86]
/// os: [linux, windows]
/// include:
/// - arch: x64
/// - os: macos
/// The result would contain the following vectors:
/// [arch: x64, os: linux]
/// [arch: x64, os: windows]
/// [arch: x86, os: linux]
/// [arch: x86, os: windows]
/// [arch: x64, os: macos]
/// </summary>
/// <returns>One strategy configuration per result vector</returns>
internal IEnumerable<StrategyConfiguration> Build()
{
// Parse includes/excludes
var include = new MatrixInclude(m_context, m_vectors, m_includeSequence);
var exclude = new MatrixExclude(m_context, m_vectors, m_excludeSequence);
// Calculate the cross product size
int productSize;
if (m_vectors.Count > 0)
{
productSize = 1;
foreach (var vectorPair in m_vectors)
{
checked
{
var vector = vectorPair.Value.AssertArray("vector");
productSize *= vector.Count;
}
}
}
else
{
productSize = 0;
}
var idBuilder = new IdBuilder();
// Cross product vectors
for (var productIndex = 0; productIndex < productSize; productIndex++)
{
// Matrix
var matrix = new DictionaryExpressionData();
var blockSize = productSize;
foreach (var vectorPair in m_vectors)
{
var vectorName = vectorPair.Key;
var vector = vectorPair.Value.AssertArray("vector");
blockSize = blockSize / vector.Count;
var vectorIndex = (productIndex / blockSize) % vector.Count;
matrix.Add(vectorName, vector[vectorIndex]);
}
// Exclude
if (exclude.Match(matrix))
{
continue;
}
// Include extra values in the vector
include.Match(matrix, out var extra);
// Create the configuration
yield return CreateConfiguration(idBuilder, matrix, extra);
}
// Explicit vectors
foreach (var matrix in include.GetUnmatchedVectors())
{
yield return CreateConfiguration(idBuilder, matrix, null);
}
}
private StrategyConfiguration CreateConfiguration(
IdBuilder idBuilder,
DictionaryExpressionData matrix,
DictionaryExpressionData extra)
{
// New configuration
var configuration = new StrategyConfiguration();
m_context.Memory.AddBytes(TemplateMemory.MinObjectSize);
// Gather segments for ID and display name
var nameBuilder = new JobNameBuilder(m_jobName);
foreach (var matrixData in matrix.Traverse(omitKeys: true))
{
var segment = default(String);
if (matrixData is BooleanExpressionData || matrixData is NumberExpressionData || matrixData is StringExpressionData)
{
segment = matrixData.ToString();
}
if (!String.IsNullOrEmpty(segment))
{
// ID segment
idBuilder.AppendSegment(segment);
// Display name segment
nameBuilder.AppendSegment(segment);
}
}
// Id
configuration.Id = idBuilder.Build(allowReservedPrefix: false, maxLength: m_context.GetFeatures().ShortMatrixIds ? 25 : WorkflowConstants.MaxNodeNameLength);
m_context.Memory.AddBytes(configuration.Id);
// Display name
configuration.Name = nameBuilder.Build();
m_context.Memory.AddBytes(configuration.Name);
// Extra values
if (extra?.Count > 0)
{
matrix.Add(extra);
}
// Matrix context
configuration.ExpressionData.Add(WorkflowTemplateConstants.Matrix, matrix);
m_context.Memory.AddBytes(WorkflowTemplateConstants.Matrix);
m_context.Memory.AddBytes(matrix, traverse: true);
return configuration;
}
/// <summary>
/// Represents the sequence "strategy.matrix.include"
/// </summary>
private sealed class MatrixInclude
{
public MatrixInclude(
TemplateContext context,
DictionaryExpressionData vectors,
SequenceToken includeSequence)
{
// Convert to includes sets
if (includeSequence?.Count > 0)
{
foreach (var includeItem in includeSequence)
{
var includeMapping = includeItem.AssertMapping("matrix includes item");
// Distinguish filters versus extra
var filter = new MappingToken(null, null, null);
var extra = new DictionaryExpressionData();
foreach (var includePair in includeMapping)
{
var includeKeyLiteral = includePair.Key.AssertString("matrix include item key");
if (vectors.ContainsKey(includeKeyLiteral.Value))
{
filter.Add(includeKeyLiteral, includePair.Value);
}
else
{
extra.Add(includeKeyLiteral.Value, includePair.Value.ToExpressionData());
}
}
// At least one filter or extra
if (filter.Count == 0 && extra.Count == 0)
{
context.Error(includeMapping, $"Matrix include mapping does not contain any values");
continue;
}
// Add filter
m_filters.Add(new MatrixIncludeFilter(filter, extra));
}
}
m_matches = new Boolean[m_filters.Count];
}
/// <summary>
/// Matches a vector from the cross product against each include filter.
///
/// For example, given the matrix:
/// arch: [x64, x86]
/// config: [release, debug]
/// include:
/// - arch: x64
/// config: release
/// publish: true
///
/// This method would return the following:
/// Match(
/// matrix: {arch: x64, config: release},
/// out extra: {publish: true})
/// => true
///
/// Match(
/// matrix: {arch: x64, config: debug},
/// out extra: null)
/// => false
///
/// Match(
/// matrix: {arch: x86, config: release},
/// out extra: null)
/// => false
///
/// Match(
/// matrix: {arch: x86, config: debug},
/// out extra: null)
/// => false
/// </summary>
/// <param name="matrix">A vector of the cross product</param>
/// <param name="extra">Extra values to add to the vector</param>
/// <returns>True if the vector matched at least one include filter</returns>
public Boolean Match(
DictionaryExpressionData matrix,
out DictionaryExpressionData extra)
{
extra = default(DictionaryExpressionData);
for (var i = 0; i < m_filters.Count; i++)
{
var filter = m_filters[i];
if (filter.Match(matrix, out var items))
{
m_matches[i] = true;
if (extra == null)
{
extra = new DictionaryExpressionData();
}
foreach (var pair in items)
{
extra[pair.Key] = pair.Value;
}
}
}
return extra != null;
}
/// <summary>
/// Gets all additional vectors to add. These are additional configurations that were not produced
/// from the cross product. These are include vectors that did not match any cross product results.
///
/// For example, given the matrix:
/// arch: [x64, x86]
/// config: [release, debug]
/// include:
/// - arch: arm64
/// config: debug
///
/// This method would return the following:
/// - {arch: arm64, config: debug}
/// </summary>
public IEnumerable<DictionaryExpressionData> GetUnmatchedVectors()
{
for (var i = 0; i < m_filters.Count; i++)
{
if (m_matches[i])
{
continue;
}
var filter = m_filters[i];
var matrix = new DictionaryExpressionData();
foreach (var pair in filter.Filter)
{
var keyLiteral = pair.Key.AssertString("matrix include item key");
matrix.Add(keyLiteral.Value, pair.Value.ToExpressionData());
}
foreach (var includePair in filter.Extra)
{
matrix.Add(includePair.Key, includePair.Value);
}
yield return matrix;
}
}
private readonly List<MatrixIncludeFilter> m_filters = new List<MatrixIncludeFilter>();
// Tracks whether a filter has been matched
private readonly Boolean[] m_matches;
}
/// <summary>
/// Represents an item within the sequence "strategy.matrix.include"
/// </summary>
private sealed class MatrixIncludeFilter : MatrixFilter
{
public MatrixIncludeFilter(
MappingToken filter,
DictionaryExpressionData extra)
: base(filter)
{
Filter = filter;
Extra = extra;
}
public Boolean Match(
DictionaryExpressionData matrix,
out DictionaryExpressionData extra)
{
if (base.Match(matrix))
{
extra = Extra;
return true;
}
extra = null;
return false;
}
public DictionaryExpressionData Extra { get; }
public MappingToken Filter { get; }
}
/// <summary>
/// Represents the sequence "strategy.matrix.exclude"
/// </summary>
private sealed class MatrixExclude
{
public MatrixExclude(
TemplateContext context,
DictionaryExpressionData vectors,
SequenceToken excludeSequence)
{
// Convert to excludes sets
if (excludeSequence?.Count > 0)
{
foreach (var excludeItem in excludeSequence)
{
var excludeMapping = excludeItem.AssertMapping("matrix excludes item");
// Check empty
if (excludeMapping.Count == 0)
{
context.Error(excludeMapping, $"Matrix exclude filter must not be empty");
continue;
}
// Validate first-level keys
foreach (var excludePair in excludeMapping)
{
var excludeKey = excludePair.Key.AssertString("matrix excludes item key");
if (!vectors.ContainsKey(excludeKey.Value))
{
context.Error(excludeKey, $"Matrix exclude key '{excludeKey.Value}' does not match any key within the matrix");
continue;
}
}
// Add filter
m_filters.Add(new MatrixExcludeFilter(excludeMapping));
}
}
}
/// <summary>
/// Matches a vector from the cross product against each exclude filter.
///
/// For example, given the matrix:
/// arch: [x64, x86]
/// config: [release, debug]
/// exclude:
/// - arch: x86
/// config: release
///
/// This method would return the following:
/// Match( {arch: x64, config: release} ) => false
/// Match( {arch: x64, config: debug} ) => false
/// Match( {arch: x86, config: release} ) => true
/// Match( {arch: x86, config: debug} ) => false
/// </summary>
/// <param name="matrix">A vector of the cross product</param>
/// <param name="extra">Extra values to add to the vector</param>
/// <returns>True if the vector matched at least one exclude filter</returns>
public Boolean Match(DictionaryExpressionData matrix)
{
foreach (var filter in m_filters)
{
if (filter.Match(matrix))
{
return true;
}
}
return false;
}
private readonly List<MatrixExcludeFilter> m_filters = new List<MatrixExcludeFilter>();
}
/// <summary>
/// Represents an item within the sequence "strategy.matrix.exclude"
/// </summary>
private sealed class MatrixExcludeFilter : MatrixFilter
{
public MatrixExcludeFilter(MappingToken filter)
: base(filter)
{
}
public new Boolean Match(DictionaryExpressionData matrix)
{
return base.Match(matrix);
}
}
/// <summary>
/// Base class for matrix include/exclude filters. That is, an item within the
/// sequence "strategy.matrix.include" or within the sequence "strategy.matrix.exclude".
/// </summary>
private abstract class MatrixFilter
{
protected MatrixFilter(MappingToken matrixFilter)
{
// Traverse the structure and add an expression to compare each leaf node.
// For example, given the filter:
// versions:
// node-version: 12
// npm-version: 6
// config: release
// The following filter expressions would be created:
// - matrix.versions.node-version == 12
// - matrix.versions.npm-version == 6
// - matrix.config == 'release'
var state = new MappingState(null, matrixFilter) as TokenState;
while (state != null)
{
if (state.MoveNext())
{
// Leaf
if (state.Current is LiteralToken literal)
{
AddExpression(state.Path, literal);
}
// Mapping
else if (state.Current is MappingToken mapping)
{
state = new MappingState(state, mapping);
}
// Sequence
else if (state.Current is SequenceToken sequence)
{
state = new SequenceState(state, sequence);
}
else
{
throw new NotSupportedException($"Unexpected token type '{state.Current.Type}' when constructing matrix filter expressions");
}
}
else
{
state = state.Parent;
}
}
}
protected Boolean Match(DictionaryExpressionData matrix)
{
if (matrix.Count == 0)
{
throw new InvalidOperationException("Matrix filter cannot be empty");
}
foreach (var expression in m_expressions)
{
var result = expression.Evaluate(null, null, matrix, null);
if (result.IsFalsy)
{
return false;
}
}
return true;
}
private void AddExpression(
String path,
LiteralToken literal)
{
var expressionLiteral = default(String);
switch (literal.Type)
{
case TokenType.Null:
expressionLiteral = ExpressionConstants.Null;
break;
case TokenType.Boolean:
var booleanToken = literal as BooleanToken;
expressionLiteral = ExpressionUtility.ConvertToParseToken(booleanToken.Value);
break;
case TokenType.Number:
var numberToken = literal as NumberToken;
expressionLiteral = ExpressionUtility.ConvertToParseToken(numberToken.Value);
break;
case TokenType.String:
var stringToken = literal as StringToken;
expressionLiteral = ExpressionUtility.ConvertToParseToken(stringToken.Value);
break;
default:
throw new NotSupportedException($"Unexpected literal type '{literal.Type}'");
}
var parser = new ExpressionParser();
var expressionString = $"{path} == {expressionLiteral}";
var expression = parser.CreateTree(expressionString, null, s_matrixFilterNamedValues, null);
m_expressions.Add(expression);
}
/// <summary>
/// Used to maintain state while traversing a mapping when building filter expressions.
/// See <see cref="MatrixFilter"/> for more info.
/// </summary>
private sealed class MappingState : TokenState
{
public MappingState(
TokenState parent,
MappingToken mapping)
: base(parent)
{
m_mapping = mapping;
m_index = -1;
}
public override Boolean MoveNext()
{
if (++m_index < m_mapping.Count)
{
var pair = m_mapping[m_index];
var keyLiteral = pair.Key.AssertString("matrix filter key");
Current = pair.Value;
var parentPath = Parent?.Path ?? WorkflowTemplateConstants.Matrix;
Path = $"{parentPath}[{ExpressionUtility.ConvertToParseToken(keyLiteral.Value)}]";
return true;
}
else
{
Current = null;
Path = null;
return false;
}
}
private MappingToken m_mapping;
private Int32 m_index;
}
/// <summary>
/// Used to maintain state while traversing a sequence when building filter expressions.
/// See <see cref="MatrixFilter"/> for more info.
/// </summary>
private sealed class SequenceState : TokenState
{
public SequenceState(
TokenState parent,
SequenceToken sequence)
: base(parent)
{
m_sequence = sequence;
m_index = -1;
}
public override Boolean MoveNext()
{
if (++m_index < m_sequence.Count)
{
Current = m_sequence[m_index];
var parentPath = Parent?.Path ?? WorkflowTemplateConstants.Matrix;
Path = $"{parentPath}[{ExpressionUtility.ConvertToParseToken((Double)m_index)}]";
return true;
}
else
{
Current = null;
Path = null;
return false;
}
}
private SequenceToken m_sequence;
private Int32 m_index;
}
/// <summary>
/// Used to maintain state while traversing a mapping/sequence when building filter expressions.
/// See <see cref="MatrixFilter"/> for more info.
/// </summary>
private abstract class TokenState
{
protected TokenState(TokenState parent)
{
Parent = parent;
}
public TemplateToken Current { get; protected set; }
public TokenState Parent { get; }
/// <summary>
/// The expression used to reference the current position within the structure.
/// For example: matrix.node-version
/// </summary>
public String Path { get; protected set; }
public abstract Boolean MoveNext();
}
/// <summary>
/// Represents the "matrix" context within an include/exclude expression
/// </summary>
private sealed class MatrixNamedValue : NamedValue
{
protected override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
{
resultMemory = null;
return context.State;
}
}
private static readonly INamedValueInfo[] s_matrixFilterNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<MatrixNamedValue>(WorkflowTemplateConstants.Matrix),
};
private readonly List<IExpressionNode> m_expressions = new List<IExpressionNode>();
}
private readonly TemplateContext m_context;
private readonly String m_jobName;
private readonly DictionaryExpressionData m_vectors = new DictionaryExpressionData();
private SequenceToken m_excludeSequence;
private SequenceToken m_includeSequence;
}
}
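
Example (illustrative sketch, not part of this diff; assumes System.Linq): the index arithmetic used by Build(). For vectors in declaration order, blockSize shrinks by each vector's length, and (productIndex / blockSize) % length selects the value.
var vectors = new (string Name, string[] Values)[]
{
    ("arch", new[] { "x64", "x86" }),
    ("os",   new[] { "linux", "windows" }),
};
var productSize = vectors.Aggregate(1, (size, v) => size * v.Values.Length);  // 4
for (var productIndex = 0; productIndex < productSize; productIndex++)
{
    var blockSize = productSize;
    var parts = new List<string>();
    foreach (var (name, values) in vectors)
    {
        blockSize /= values.Length;
        parts.Add($"{name}: {values[(productIndex / blockSize) % values.Length]}");
    }
    Console.WriteLine(string.Join(", ", parts));
}
// Output:
//   arch: x64, os: linux
//   arch: x64, os: windows
//   arch: x86, os: linux
//   arch: x86, os: windows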

View File

@@ -0,0 +1,44 @@
using System;
namespace GitHub.Actions.WorkflowParser.Conversion
{
internal static class PermissionLevelExtensions
{
public static bool IsLessThanOrEqualTo(
this PermissionLevel permissionLevel,
PermissionLevel other)
{
switch (permissionLevel, other)
{
case (PermissionLevel.NoAccess, PermissionLevel.NoAccess):
case (PermissionLevel.NoAccess, PermissionLevel.Read):
case (PermissionLevel.NoAccess, PermissionLevel.Write):
case (PermissionLevel.Read, PermissionLevel.Read):
case (PermissionLevel.Read, PermissionLevel.Write):
case (PermissionLevel.Write, PermissionLevel.Write):
return true;
case (PermissionLevel.Read, PermissionLevel.NoAccess):
case (PermissionLevel.Write, PermissionLevel.NoAccess):
case (PermissionLevel.Write, PermissionLevel.Read):
return false;
default:
throw new ArgumentException($"Invalid enum comparison: {permissionLevel} and {other}");
}
}
public static string ConvertToString(this PermissionLevel permissionLevel)
{
switch (permissionLevel)
{
case PermissionLevel.NoAccess:
return "none";
case PermissionLevel.Read:
return "read";
case PermissionLevel.Write:
return "write";
default:
throw new NotSupportedException($"invalid permission level found. {permissionLevel}");
}
}
}
}
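
Example (illustrative sketch, not part of this diff):
var ok = PermissionLevel.Read.IsLessThanOrEqualTo(PermissionLevel.Write);       // true
var tooHigh = PermissionLevel.Write.IsLessThanOrEqualTo(PermissionLevel.Read);  // false
var label = PermissionLevel.NoAccess.ConvertToString();                         // "none"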

View File

@@ -0,0 +1,37 @@
namespace GitHub.Actions.WorkflowParser.Conversion
{
internal sealed class PermissionLevelViolation
{
public PermissionLevelViolation(string permissionName, PermissionLevel requestedPermissions, PermissionLevel allowedPermissions)
{
PermissionName = permissionName;
RequestedPermissionLevel = requestedPermissions;
AllowedPermissionLevel = allowedPermissions;
}
public string PermissionName
{
get;
}
public PermissionLevel RequestedPermissionLevel
{
get;
}
public PermissionLevel AllowedPermissionLevel
{
get;
}
public string RequestedPermissionLevelString()
{
return $"{PermissionName}: {RequestedPermissionLevel.ConvertToString()}";
}
public string AllowedPermissionLevelString()
{
return $"{PermissionName}: {AllowedPermissionLevel.ConvertToString()}";
}
}
}

View File

@@ -0,0 +1,79 @@
#nullable enable
using System;
using System.Linq;
using GitHub.Actions.WorkflowParser.ObjectTemplating;
namespace GitHub.Actions.WorkflowParser.Conversion
{
internal static class PermissionsHelper
{
/// <summary>
/// Validates that permissions requested in a reusable workflow do not exceed the allowed permissions
/// </summary>
/// <param name="context">The template context</param>
/// <param name="workflowJob">The reusable workflow job</param>
/// <param name="embeddedJob">(Optional) Used when formatting errors related to an embedded job within the reusable workflow</param>
/// <param name="requested">The permissions within the reusable workflow file. These may be defined either at the root of the file, or may be defined on a job within the file.</param>
/// <param name="explicitMax">(Optional) The max permissions explicitly allowed by the caller</param>
/// <param name="permissionsPolicy">The default permissions policy</param>
/// <param name="isTrusted">Indicates whether the reusable workflow exists within the same trust boundary (e.g. enterprise/organization) as a the root workflow</param>
internal static void ValidateEmbeddedPermissions(
TemplateContext context,
ReusableWorkflowJob workflowJob,
IJob? embeddedJob,
Permissions requested,
Permissions? explicitMax,
string permissionsPolicy,
bool isTrusted)
{
if (requested == null)
{
return;
}
var effectiveMax = explicitMax ?? CreatePermissionsFromPolicy(context, permissionsPolicy, includeIdToken: isTrusted, includeModels: context.GetFeatures().AllowModelsPermission);
if (requested.ViolatesMaxPermissions(effectiveMax, out var permissionLevelViolations))
{
var requestedStr = string.Join(", ", permissionLevelViolations.Select(x => x.RequestedPermissionLevelString()));
var allowedStr = string.Join(", ", permissionLevelViolations.Select(x => x.AllowedPermissionLevelString()));
if (embeddedJob != null)
{
context.Error(workflowJob.Id, $"Error calling workflow '{workflowJob.Ref}'. The nested job '{embeddedJob.Id!.Value}' is requesting '{requestedStr}', but is only allowed '{allowedStr}'.");
}
else
{
context.Error(workflowJob.Id, $"Error calling workflow '{workflowJob.Ref}'. The workflow is requesting '{requestedStr}', but is only allowed '{allowedStr}'.");
}
}
}
/// <summary>
/// Creates permissions based on policy
/// </summary>
/// <param name="context">The template context</param>
/// <param name="permissionsPolicy">The permissions policy</param>
/// <param name="includeIdToken">Indicates whether the permissions should include an ID token</param>
private static Permissions CreatePermissionsFromPolicy(
TemplateContext context,
string permissionsPolicy,
bool includeIdToken,
bool includeModels)
{
switch (permissionsPolicy)
{
case WorkflowConstants.PermissionsPolicy.LimitedRead:
return new Permissions(PermissionLevel.NoAccess, includeIdToken: false, includeAttestations: false, includeModels: false)
{
Contents = PermissionLevel.Read,
Packages = PermissionLevel.Read,
};
case WorkflowConstants.PermissionsPolicy.Write:
return new Permissions(PermissionLevel.Write, includeIdToken: includeIdToken, includeAttestations: true, includeModels: includeModels);
default:
throw new ArgumentException($"Unexpected permission policy: '{permissionsPolicy}'");
}
}
}
}

View File

@@ -0,0 +1,272 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Linq;
using GitHub.Actions.Expressions;
using GitHub.Actions.Expressions.Sdk;
using GitHub.Actions.WorkflowParser.Conversion;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.Conversion
{
using GitHub.Actions.WorkflowParser.ObjectTemplating;
/// <summary>
/// Loads reusable workflows
/// </summary>
internal sealed class ReusableWorkflowsLoader
{
private ReusableWorkflowsLoader(
IServerTraceWriter serverTrace,
ITraceWriter trace,
ParseOptions options,
WorkflowUsage usage,
TemplateContext context,
WorkflowTemplate workflowTemplate,
YamlTemplateLoader loader,
String permissionPolicy,
IDictionary<string, ReferencedWorkflow> referencedWorkflows)
{
m_serverTrace = serverTrace ?? new EmptyServerTraceWriter();
m_trace = trace ?? new EmptyTraceWriter();
m_parseOptions = new ParseOptions(options ?? throw new ArgumentNullException(nameof(options)));
m_usage = usage ?? throw new ArgumentNullException(nameof(usage));
m_context = context ?? throw new ArgumentNullException(nameof(context));
m_workflowTemplate = workflowTemplate ?? throw new ArgumentNullException(nameof(workflowTemplate));
m_loader = loader ?? throw new ArgumentNullException(nameof(loader));
m_permissionPolicy = permissionPolicy ?? throw new ArgumentNullException(nameof(permissionPolicy));
m_referencedWorkflows = referencedWorkflows ?? throw new ArgumentNullException(nameof(referencedWorkflows));
}
/// <summary>
/// Loads reusable workflows if not in an error state.
///
/// Any new errors are recorded to both <see cref="TemplateContext.Errors"/> and <see cref="WorkflowTemplate.Errors"/>.
/// </summary>
public static void Load(
IServerTraceWriter serverTrace,
ITraceWriter trace,
ParseOptions options,
WorkflowUsage usage,
TemplateContext context,
WorkflowTemplate workflowTemplate,
YamlTemplateLoader loader,
String permissionPolicy,
IDictionary<string, ReferencedWorkflow> referencedWorkflows)
{
new ReusableWorkflowsLoader(serverTrace, trace, options, usage, context, workflowTemplate, loader, permissionPolicy, referencedWorkflows)
.Load();
}
/// <summary>
/// Refer to the static Load overload
/// </summary>
private void Load()
{
// Skip reusable workflows?
if (m_parseOptions.SkipReusableWorkflows)
{
return;
}
// Check errors
if (m_context.Errors.Count > 0)
{
return;
}
// Note, the "finally" block appends context.Errors to workflowTemplate
var hasReusableWorkflowJob = false;
try
{
foreach (var job in m_workflowTemplate.Jobs)
{
// Load reusable workflow
if (job is ReusableWorkflowJob workflowJob)
{
hasReusableWorkflowJob = true;
LoadRecursive(workflowJob);
// Check errors
if (m_context.Errors.Count > 0)
{
return;
}
}
}
}
catch (ReferencedWorkflowNotFoundException)
{
// Long term, catch TemplateUserException and let others bubble
throw;
}
catch (Exception ex)
{
m_context.Errors.Add(ex);
}
finally
{
// Append context.Errors to workflowTemplate
if (m_context.Errors.Count > 0)
{
foreach (var error in m_context.Errors)
{
m_workflowTemplate.Errors.Add(new WorkflowValidationError(error.Code, error.Message));
}
}
// Update WorkflowTemplate.FileTable with referenced workflows
if (hasReusableWorkflowJob)
{
m_workflowTemplate.FileTable.Clear();
m_workflowTemplate.FileTable.AddRange(m_context.GetFileTable());
}
}
}
/// <summary>
/// Loads the referenced workflow by parsing the workflow file and converting it into workflow template job types.
/// </summary>
private void LoadRecursive(
ReusableWorkflowJob workflowJob,
int depth = 1)
{
// Check depth
if (depth > m_parseOptions.MaxNestedReusableWorkflowsDepth)
{
throw new Exception($"Nested reusable workflow depth exceeded {m_parseOptions.MaxNestedReusableWorkflowsDepth}.");
}
TemplateToken tokens;
// Load the reusable workflow
try
{
// Fully qualify workflow ref
workflowJob.Ref = FullyQualifyWorkflowRef(m_context, workflowJob.Ref, m_referencedWorkflows);
var isTrusted = IsReferencedWorkflowTrusted(workflowJob.Ref.Value);
// Parse template tokens
tokens = m_loader.ParseWorkflow(m_context, workflowJob.Ref.Value);
// Gather telemetry
m_usage.Gather(m_context, tokens);
// Check errors
if (m_context.Errors.Count > 0)
{
// Short-circuit
return;
}
// Convert to workflow types
WorkflowTemplateConverter.ConvertToReferencedWorkflow(m_context, tokens, workflowJob, m_permissionPolicy, isTrusted);
// Check errors
if (m_context.Errors.Count > 0)
{
// Short-circuit
return;
}
}
finally
{
// Prefix errors with caller file/line/col
PrefixErrorsWithCallerInfo(workflowJob);
}
// Load nested reusable workflows
foreach (var nestedJob in workflowJob.Jobs)
{
if (nestedJob is ReusableWorkflowJob nestedWorkflowJob)
{
// Recurse
LoadRecursive(nestedWorkflowJob, depth + 1);
// Check errors
if (m_context.Errors.Count > 0)
{
return;
}
}
}
}
/// <summary>
/// For the given token and referencedWorkflows, resolves the workflow reference (i.e. the token value).
/// This ensures the workflow reference is fully qualified (owner/repo + path + version) even when local workflows are called without an owner/repo or version
/// </summary>
internal static StringToken FullyQualifyWorkflowRef(
TemplateContext context,
StringToken workflowJobRef,
IDictionary<string, ReferencedWorkflow> referencedWorkflows)
{
if (!workflowJobRef.Value.StartsWith(WorkflowTemplateConstants.LocalPrefix))
{
return workflowJobRef;
}
var callerPath = context.GetFileName(workflowJobRef.FileId.Value);
if (!referencedWorkflows.TryGetValue(callerPath, out ReferencedWorkflow callerWorkflow) || callerWorkflow == null)
{
throw new ReferencedWorkflowNotFoundException($"Cannot find the caller workflow from the referenced workflows: '{callerPath}'");
}
var filePath = workflowJobRef.Value.Substring(WorkflowTemplateConstants.LocalPrefix.Length);
var path = $"{callerWorkflow.Repository}/{filePath}@{callerWorkflow.ResolvedSha}";
return new StringToken(workflowJobRef.FileId, workflowJobRef.Line, workflowJobRef.Column, path);
}
/// <summary>
/// Prefixes all error messages with the caller file/line/column.
/// </summary>
private void PrefixErrorsWithCallerInfo(ReusableWorkflowJob workflowJob)
{
if (m_context.Errors.Count == 0)
{
return;
}
var callerFile = m_context.GetFileName(workflowJob.Ref.FileId.Value);
for (int i = 0; i < m_context.Errors.Count; i++)
{
var errorMessage = m_context.Errors.GetMessage(i);
if (String.IsNullOrEmpty(errorMessage) || !errorMessage.StartsWith(callerFile))
{
// When the caller file is not already in the error message, prefix it so annotations point to the caller
m_context.Errors.PrefixMessage(
i,
TemplateStrings.CalledWorkflowNotValidWithErrors(
callerFile,
TemplateStrings.LineColumn(workflowJob.Ref.Line, workflowJob.Ref.Column)));
}
}
}
/// <summary>
/// Checks if the given workflowJobRefValue is trusted
/// </summary>
private bool IsReferencedWorkflowTrusted(String workflowJobRefValue)
{
if (m_referencedWorkflows.TryGetValue(workflowJobRefValue, out ReferencedWorkflow referencedWorkflow) &&
referencedWorkflow != null)
{
return referencedWorkflow.IsTrusted();
}
return false;
}
private readonly TemplateContext m_context;
private readonly YamlTemplateLoader m_loader;
private readonly ParseOptions m_parseOptions;
private readonly string m_permissionPolicy;
private readonly IDictionary<string, ReferencedWorkflow> m_referencedWorkflows;
private readonly IServerTraceWriter m_serverTrace;
private readonly ITraceWriter m_trace;
private readonly WorkflowUsage m_usage;
private readonly WorkflowTemplate m_workflowTemplate;
}
}
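
Example (illustrative sketch, not part of this diff; repository and SHA values are placeholders): the qualification performed by FullyQualifyWorkflowRef for a local reference.
// caller workflow:   Repository = "octo-org/octo-repo", ResolvedSha = "0123abc"
// workflowJob.Ref:   "./.github/workflows/deploy.yml"
var localRef = "./.github/workflows/deploy.yml";
var filePath = localRef.Substring(WorkflowTemplateConstants.LocalPrefix.Length);
var qualified = $"octo-org/octo-repo/{filePath}@0123abc";
// => "octo-org/octo-repo/.github/workflows/deploy.yml@0123abc"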

View File

@@ -0,0 +1,76 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using GitHub.Actions.Expressions.Data;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.Conversion
{
internal static class TemplateTokenExtensions
{
public static ArrayExpressionData ToExpressionData(this SequenceToken sequence)
{
var token = sequence as TemplateToken;
var expressionData = token.ToExpressionData();
return expressionData.AssertArray("converted sequence token");
}
public static DictionaryExpressionData ToExpressionData(this MappingToken mapping)
{
var token = mapping as TemplateToken;
var expressionData = token.ToExpressionData();
return expressionData.AssertDictionary("converted mapping token");
}
public static ExpressionData ToExpressionData(this TemplateToken token)
{
switch (token.Type)
{
case TokenType.Mapping:
var mapping = token as MappingToken;
var dictionary = new DictionaryExpressionData();
if (mapping.Count > 0)
{
foreach (var pair in mapping)
{
var keyLiteral = pair.Key.AssertString("dictionary context data key");
var key = keyLiteral.Value;
var value = pair.Value.ToExpressionData();
dictionary.Add(key, value);
}
}
return dictionary;
case TokenType.Sequence:
var sequence = token as SequenceToken;
var array = new ArrayExpressionData();
if (sequence.Count > 0)
{
foreach (var item in sequence)
{
array.Add(item.ToExpressionData());
}
}
return array;
case TokenType.Null:
return null;
case TokenType.Boolean:
var boolean = token as BooleanToken;
return new BooleanExpressionData(boolean.Value);
case TokenType.Number:
var number = token as NumberToken;
return new NumberExpressionData(number.Value);
case TokenType.String:
var stringToken = token as StringToken;
return new StringExpressionData(stringToken.Value);
default:
throw new NotSupportedException($"Unexpected {nameof(TemplateToken)} type '{token.Type}'");
}
}
}
}
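
Example (illustrative sketch, not part of this diff): converting a small mapping token into expression data. Token constructors take (fileId, line, column[, value]); the null position arguments are placeholders.
var mapping = new MappingToken(null, null, null);
mapping.Add(new StringToken(null, null, null, "os"), new StringToken(null, null, null, "linux"));
mapping.Add(new StringToken(null, null, null, "node"), new NumberToken(null, null, null, 18));
var data = mapping.ToExpressionData();  // DictionaryExpressionData { os: "linux", node: 18 }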

View File

@@ -0,0 +1,63 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Schema;
namespace GitHub.Actions.WorkflowParser.Conversion
{
/// <summary>
/// Loads the schema for workflows
/// </summary>
internal static class WorkflowSchemaFactory
{
/// <summary>
/// Loads the template schema for the specified features.
/// </summary>
internal static TemplateSchema GetSchema(WorkflowFeatures features)
{
if (features == null)
{
throw new System.ArgumentNullException(nameof(features));
}
// Find resource names corresponding to enabled features
var resourceNames = WorkflowFeatures.Names
.Where(x => features.GetFeature(x)) // Enabled features only
.Select(x => string.Concat(c_resourcePrefix, "-", x, c_resourceSuffix)) // To resource name
.Where(x => s_resourceNames.Contains(x)) // Resource must exist
.ToList();
// More than one resource found?
if (resourceNames.Count > 1)
{
throw new NotSupportedException("Failed to load workflow schema. Only one feature flag with schema changes can be enabled at a time.");
}
var resourceName = resourceNames.FirstOrDefault() ?? c_defaultResourceName;
return s_schemas.GetOrAdd(
resourceName,
(resourceName) =>
{
var assembly = Assembly.GetExecutingAssembly();
var json = default(String);
using (var stream = assembly.GetManifestResourceStream(resourceName)!)
using (var streamReader = new StreamReader(stream))
{
json = streamReader.ReadToEnd();
}
var objectReader = new JsonObjectReader(null, json);
return TemplateSchema.Load(objectReader);
});
}
private const string c_resourcePrefix = "GitHub.Actions.WorkflowParser.workflow-v1.0";
private const string c_resourceSuffix = ".json";
private const string c_defaultResourceName = c_resourcePrefix + c_resourceSuffix;
private static readonly HashSet<string> s_resourceNames = Assembly.GetExecutingAssembly().GetManifestResourceNames().ToHashSet(StringComparer.Ordinal);
private static readonly ConcurrentDictionary<string, TemplateSchema> s_schemas = new(StringComparer.Ordinal);
}
}
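
Example (illustrative sketch, not part of this diff; the feature name is a placeholder): how schema resource names are composed. The default resource is "GitHub.Actions.WorkflowParser.workflow-v1.0.json"; an enabled feature probes for a per-feature resource, and GetSchema throws if more than one enabled feature ships its own schema.
var featureName = "my-flag";  // placeholder
var resourceName = string.Concat("GitHub.Actions.WorkflowParser.workflow-v1.0", "-", featureName, ".json");
// => "GitHub.Actions.WorkflowParser.workflow-v1.0-my-flag.json"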

View File

@@ -0,0 +1,121 @@
using System;
namespace GitHub.Actions.WorkflowParser.Conversion
{
internal static class WorkflowTemplateConstants
{
public const String Always = "always";
public const String BooleanNeedsContext = "boolean-needs-context";
public const String BooleanStepsContext = "boolean-steps-context";
public const String BooleanStrategyContext = "boolean-strategy-context";
public const String CancelInProgress = "cancel-in-progress";
public const String CancelTimeoutMinutes = "cancel-timeout-minutes";
public const String Cancelled = "cancelled";
public const String Concurrency = "concurrency";
public const String Container = "container";
public const String ContinueOnError = "continue-on-error";
public const String Credentials = "credentials";
public const String Default = "default";
public const String Defaults = "defaults";
public const String Description = "description";
public const String DockerUriPrefix = "docker://";
public const String EmbeddedConcurrency = "embedded-concurrency";
public const String Env = "env";
public const String Ent = "ent";
public const String Enterprise = "enterprise";
public const String Environment = "environment";
public const String Event = "event";
public const String EventName = "event_name";
public const String EventPattern = "github.event";
public const String Exclude = "exclude";
public const String FailFast = "fail-fast";
public const String Failure = "failure";
public const String GitHub = "github";
public const String Group = "group";
public const String HashFiles = "hashFiles";
public const String Id = "id";
public const String If = "if";
public const String Image = "image";
public const String ImageName = "image-name";
public const String CustomImageVersion = "version";
public const String Include = "include";
public const String Inherit = "inherit";
public const String Inputs = "inputs";
public const String InputsPattern = "inputs.*";
public const String Job = "job";
public const String JobConcurrency = "job-concurrency";
public const String JobDefaultsRun = "job-defaults-run";
public const String JobEnvironment = "job-environment";
public const String JobIfResult = "job-if-result";
public const String JobOutputs = "job-outputs";
public const String Jobs = "jobs";
public const String JobsPattern = "jobs.*";
public const String JobsOutputsPattern = "jobs.*.outputs";
public const String Labels = "labels";
public const String LocalPrefix = "./";
public const String Matrix = "matrix";
public const String MaxParallel = "max-parallel";
public const String Name = "name";
public const String Needs = "needs";
public const String NumberNeedsContext = "number-needs-context";
public const String NumberStepsContext = "number-steps-context";
public const String NumberStrategyContext = "number-strategy-context";
public const String On = "on";
public const String Options = "options";
public const String Org = "org";
public const String Organization = "organization";
public const String Outputs = "outputs";
public const String OutputsPattern = "needs.*.outputs";
public const String Password = "password";
public const String Permissions = "permissions";
public const String Pool = "pool";
public const String Ports = "ports";
public const String Required = "required";
public const String Result = "result";
public const String Run = "run";
public const String RunName = "run-name";
public const String Runner = "runner";
public const String RunsOn = "runs-on";
public const String Secret = "secret";
public const String Secrets = "secrets";
public const String Services = "services";
public const String Shell = "shell";
public const String Skipped = "skipped";
public const String Slash = "/";
public const String Snapshot = "snapshot";
public const String StepEnv = "step-env";
public const String StepIfResult = "step-if-result";
public const String StepWith = "step-with";
public const String Steps = "steps";
public const String Strategy = "strategy";
public const String StringNeedsContext = "string-needs-context";
public const String StringRunnerContextNoSecrets = "string-runner-context-no-secrets";
public const String StringStepsContext = "string-steps-context";
public const String StringStrategyContext = "string-strategy-context";
public const String Success = "success";
public const String TimeoutMinutes = "timeout-minutes";
public const String Type = "type";
public const String TypeString = "string";
public const String TypeBoolean = "boolean";
public const String TypeNumber = "number";
public const String Url = "url";
public const String Username = "username";
public const String Uses = "uses";
public const String Vars = "vars";
public const String VarsPattern = "vars.*";
public const String VmImage = "vmImage";
public const String Volumes = "volumes";
public const String With = "with";
public const String Workflow = "workflow";
public const String Workflow_1_0 = "workflow-v1.0";
public const String WorkflowCall = "workflow_call";
public const String WorkflowCallInputs = "workflow-call-inputs";
public const String WorkflowCallOutputs = "workflow-call-outputs";
public const String WorkflowConcurrency = "workflow-concurrency";
public const String WorkflowDispatch = "workflow_dispatch";
public const String WorkflowJobSecrets = "workflow-job-secrets";
public const String WorkflowJobWith = "workflow-job-with";
public const String WorkflowRoot = "workflow-root";
public const String WorkingDirectory = "working-directory";
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,805 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using GitHub.Actions.WorkflowParser.ObjectTemplating;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
using YamlDotNet.Core;
using YamlDotNet.Core.Events;
namespace GitHub.Actions.WorkflowParser.Conversion
{
/// <summary>
/// Converts a YAML file into a TemplateToken
/// </summary>
internal sealed class YamlObjectReader : IObjectReader
{
internal YamlObjectReader(
Int32? fileId,
TextReader input,
Boolean allowAnchors = false,
Telemetry telemetry = null)
{
m_fileId = fileId;
m_parser = new Parser(input);
m_allowAnchors = allowAnchors;
m_telemetry = telemetry ?? new Telemetry();
m_events = new List<ParsingEvent>();
m_anchors = new Dictionary<String, Int32>();
m_replay = new Stack<YamlReplayState>();
}
public Boolean AllowLiteral(out LiteralToken value)
{
if (EvaluateCurrent() is Scalar scalar)
{
// Tag specified
if (!String.IsNullOrEmpty(scalar.Tag))
{
// String tag
if (String.Equals(scalar.Tag, c_stringTag, StringComparison.Ordinal))
{
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
MoveNext();
return true;
}
// Not plain style
if (scalar.Style != ScalarStyle.Plain)
{
throw new NotSupportedException($"The scalar style '{scalar.Style}' on line {scalar.Start.Line} and column {scalar.Start.Column} is not valid with the tag '{scalar.Tag}'");
}
// Boolean, Float, Integer, or Null
switch (scalar.Tag)
{
case c_booleanTag:
value = ParseBoolean(scalar);
break;
case c_floatTag:
value = ParseFloat(scalar);
break;
case c_integerTag:
value = ParseInteger(scalar);
break;
case c_nullTag:
value = ParseNull(scalar);
break;
default:
throw new NotSupportedException($"Unexpected tag '{scalar.Tag}'");
}
MoveNext();
return true;
}
// Plain style, determine type using YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
if (scalar.Style == ScalarStyle.Plain)
{
if (MatchNull(scalar, out var nullToken))
{
value = nullToken;
}
else if (MatchBoolean(scalar, out var booleanToken))
{
value = booleanToken;
}
else if (MatchInteger(scalar, out var numberToken) ||
MatchFloat(scalar, out numberToken))
{
value = numberToken;
}
else
{
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
}
MoveNext();
return true;
}
// Otherwise assume string
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
MoveNext();
return true;
}
value = default;
return false;
}
public Boolean AllowSequenceStart(out SequenceToken value)
{
if (EvaluateCurrent() is SequenceStart sequenceStart)
{
value = new SequenceToken(m_fileId, sequenceStart.Start.Line, sequenceStart.Start.Column);
MoveNext();
return true;
}
value = default;
return false;
}
public Boolean AllowSequenceEnd()
{
if (EvaluateCurrent() is SequenceEnd)
{
MoveNext();
return true;
}
return false;
}
public Boolean AllowMappingStart(out MappingToken value)
{
if (EvaluateCurrent() is MappingStart mappingStart)
{
value = new MappingToken(m_fileId, mappingStart.Start.Line, mappingStart.Start.Column);
MoveNext();
return true;
}
value = default;
return false;
}
public Boolean AllowMappingEnd()
{
if (EvaluateCurrent() is MappingEnd)
{
MoveNext();
return true;
}
return false;
}
/// <summary>
/// Consumes the last parsing events, which are expected to be DocumentEnd and StreamEnd.
/// </summary>
public void ValidateEnd()
{
if (EvaluateCurrent() is DocumentEnd)
{
MoveNext();
}
else
{
throw new InvalidOperationException("Expected document end parse event");
}
if (EvaluateCurrent() is StreamEnd)
{
MoveNext();
}
else
{
throw new InvalidOperationException("Expected stream end parse event");
}
if (MoveNext())
{
throw new InvalidOperationException("Expected end of parse events");
}
}
/// <summary>
/// Consumes the first parsing events, which are expected to be StreamStart and DocumentStart.
/// </summary>
public void ValidateStart()
{
if (EvaluateCurrent() != null)
{
throw new InvalidOperationException("Unexpected parser state");
}
if (!MoveNext())
{
throw new InvalidOperationException("Expected a parse event");
}
if (EvaluateCurrent() is StreamStart)
{
MoveNext();
}
else
{
throw new InvalidOperationException("Expected stream start parse event");
}
if (EvaluateCurrent() is DocumentStart)
{
MoveNext();
}
else
{
throw new InvalidOperationException("Expected document start parse event");
}
}
private ParsingEvent EvaluateCurrent_Legacy()
{
if (m_current == null)
{
m_current = m_parser.Current;
if (m_current != null)
{
if (m_current is Scalar scalar)
{
// Verify not using anchors
if (scalar.Anchor != null)
{
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{scalar.Anchor}'");
}
}
else if (m_current is MappingStart mappingStart)
{
// Verify not using anchors
if (mappingStart.Anchor != null)
{
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{mappingStart.Anchor}'");
}
}
else if (m_current is SequenceStart sequenceStart)
{
// Verify not using anchors
if (sequenceStart.Anchor != null)
{
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{sequenceStart.Anchor}'");
}
}
else if (!(m_current is MappingEnd) &&
!(m_current is SequenceEnd) &&
!(m_current is DocumentStart) &&
!(m_current is DocumentEnd) &&
!(m_current is StreamStart) &&
!(m_current is StreamEnd))
{
throw new InvalidOperationException($"Unexpected parsing event type: {m_current.GetType().Name}");
}
}
}
return m_current;
}
private ParsingEvent EvaluateCurrent()
{
if (!m_allowAnchors)
{
return EvaluateCurrent_Legacy();
}
return m_current;
}
private Boolean MoveNext_Legacy()
{
m_current = null;
return m_parser.MoveNext();
}
private Boolean MoveNext()
{
if (!m_allowAnchors)
{
return MoveNext_Legacy();
}
// Replaying an anchor?
// Adjust depth.
// Pop if done.
if (m_replay.Count > 0)
{
var replay = m_replay.Peek();
if (m_current is Scalar)
{
// Done?
if (replay.Depth == 0)
{
// Pop
m_replay.Pop();
}
}
else if (m_current is SequenceStart || m_current is MappingStart)
{
// Increment depth
replay.Depth++;
}
else if (m_current is SequenceEnd || m_current is MappingEnd)
{
// Decrement depth
replay.Depth--;
// Done?
if (replay.Depth == 0)
{
// Pop
m_replay.Pop();
}
}
}
// Still replaying?
if (m_replay.Count > 0)
{
var replay = m_replay.Peek();
// Move next
replay.Index++;
// Store current
m_current = m_events[replay.Index];
}
// Not replaying
else
{
// Move next
if (!m_parser.MoveNext())
{
// Clear current
m_current = null;
// Short-circuit
return false;
}
// Store current
m_current = m_parser.Current;
// Store event
m_events.Add(m_current);
// Anchor?
var anchor = (m_current as NodeEvent)?.Anchor;
if (anchor != null)
{
// Not allowed?
if (!m_allowAnchors)
{
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{anchor}'");
}
// Validate node type
if (m_current is not Scalar && m_current is not MappingStart && m_current is not SequenceStart)
{
throw new InvalidOperationException($"Unexpected node type with anchor '{anchor}': {m_current.GetType().Name}");
}
// Store anchor index
m_anchors[anchor] = m_events.Count - 1;
// Count anchors
m_telemetry.YamlAnchors++;
}
// Count aliases
if (m_current is AnchorAlias)
{
m_telemetry.YamlAliases++;
}
// Validate node type
if (m_current is not Scalar &&
m_current is not MappingStart &&
m_current is not MappingEnd &&
m_current is not SequenceStart &&
m_current is not SequenceEnd &&
m_current is not DocumentStart &&
m_current is not DocumentEnd &&
m_current is not StreamStart &&
m_current is not StreamEnd &&
m_current is not AnchorAlias)
{
throw new InvalidOperationException($"Unexpected parsing event type: {m_current.GetType().Name}");
}
}
// Alias?
if (m_current is AnchorAlias alias)
{
// Anchor index
if (!m_anchors.TryGetValue(alias.Value, out var anchorIndex))
{
throw new InvalidOperationException($"Unknown anchor '{alias.Value}'");
}
// Move to anchor
m_current = m_events[anchorIndex];
// Push replay state
m_replay.Push(new YamlReplayState { Index = anchorIndex, Depth = 0 });
}
// Max nodes traversed?
m_numNodes++;
if (m_numNodes > c_maxYamlNodes)
{
throw new InvalidOperationException("Maximum YAML nodes exceeded");
}
return true;
}
private BooleanToken ParseBoolean(Scalar scalar)
{
if (MatchBoolean(scalar, out var token))
{
return token;
}
ThrowInvalidValue(scalar, c_booleanTag); // throws
return default;
}
private NumberToken ParseFloat(Scalar scalar)
{
if (MatchFloat(scalar, out var token))
{
return token;
}
ThrowInvalidValue(scalar, c_floatTag); // throws
return default;
}
private NumberToken ParseInteger(Scalar scalar)
{
if (MatchInteger(scalar, out var token))
{
return token;
}
ThrowInvalidValue(scalar, c_integerTag); // throws
return default;
}
private NullToken ParseNull(Scalar scalar)
{
if (MatchNull(scalar, out var token))
{
return token;
}
ThrowInvalidValue(scalar, c_nullTag); // throws
return default;
}
private Boolean MatchBoolean(
Scalar scalar,
out BooleanToken value)
{
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
switch (scalar.Value ?? String.Empty)
{
case "true":
case "True":
case "TRUE":
value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, true);
return true;
case "false":
case "False":
case "FALSE":
value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, false);
return true;
}
value = default;
return false;
}
private Boolean MatchFloat(
Scalar scalar,
out NumberToken value)
{
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
var str = scalar.Value;
if (!String.IsNullOrEmpty(str))
{
// Check for [-+]?(\.inf|\.Inf|\.INF)|\.nan|\.NaN|\.NAN
switch (str)
{
case ".inf":
case ".Inf":
case ".INF":
case "+.inf":
case "+.Inf":
case "+.INF":
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.PositiveInfinity);
return true;
case "-.inf":
case "-.Inf":
case "-.INF":
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NegativeInfinity);
return true;
case ".nan":
case ".NaN":
case ".NAN":
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NaN);
return true;
}
// Otherwise check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?
// Skip leading sign
var index = str[0] == '-' || str[0] == '+' ? 1 : 0;
// Check for integer portion
var length = str.Length;
var hasInteger = false;
while (index < length && str[index] >= '0' && str[index] <= '9')
{
hasInteger = true;
index++;
}
// Check for decimal point
var hasDot = false;
if (index < length && str[index] == '.')
{
hasDot = true;
index++;
}
// Check for decimal portion
var hasDecimal = false;
while (index < length && str[index] >= '0' && str[index] <= '9')
{
hasDecimal = true;
index++;
}
// Check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)
if ((hasDot && hasDecimal) || hasInteger)
{
// Check for end
if (index == length)
{
// Try parse
if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var doubleValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
return true;
}
// Otherwise exceeds range
else
{
ThrowInvalidValue(scalar, c_floatTag); // throws
}
}
// Check [eE][-+]?[0-9]+
else if (index < length && (str[index] == 'e' || str[index] == 'E'))
{
index++;
// Skip sign
if (index < length && (str[index] == '-' || str[index] == '+'))
{
index++;
}
// Check for exponent
var hasExponent = false;
while (index < length && str[index] >= '0' && str[index] <= '9')
{
hasExponent = true;
index++;
}
// Check for end
if (hasExponent && index == length)
{
// Try parse
if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out var doubleValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, (Double)doubleValue);
return true;
}
// Otherwise exceeds range
else
{
ThrowInvalidValue(scalar, c_floatTag); // throws
}
}
}
}
}
value = default;
return false;
}
private Boolean MatchInteger(
Scalar scalar,
out NumberToken value)
{
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
var str = scalar.Value;
if (!String.IsNullOrEmpty(str))
{
// Check for [0-9]+
var firstChar = str[0];
if (firstChar >= '0' && firstChar <= '9' &&
str.Skip(1).All(x => x >= '0' && x <= '9'))
{
// Try parse
if (Double.TryParse(str, NumberStyles.None, CultureInfo.InvariantCulture, out var doubleValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
return true;
}
// Otherwise exceeds range
ThrowInvalidValue(scalar, c_integerTag); // throws
}
// Check for [-+][0-9]+
else if ((firstChar == '-' || firstChar == '+') &&
str.Length > 1 &&
str.Skip(1).All(x => x >= '0' && x <= '9'))
{
// Try parse
if (Double.TryParse(str, NumberStyles.AllowLeadingSign, CultureInfo.InvariantCulture, out var doubleValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
return true;
}
// Otherwise exceeds range
ThrowInvalidValue(scalar, c_integerTag); // throws
}
// Check for 0x[0-9a-fA-F]+
else if (firstChar == '0' &&
str.Length > 2 &&
str[1] == 'x' &&
str.Skip(2).All(x => (x >= '0' && x <= '9') || (x >= 'a' && x <= 'f') || (x >= 'A' && x <= 'F')))
{
// Try parse
if (Int32.TryParse(str.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out var integerValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
return true;
}
// Otherwise exceeds range
ThrowInvalidValue(scalar, c_integerTag); // throws
}
// Check for 0o[0-7]+
else if (firstChar == '0' &&
str.Length > 2 &&
str[1] == 'o' &&
str.Skip(2).All(x => x >= '0' && x <= '7'))
{
// Try parse
var integerValue = default(Int32);
try
{
integerValue = Convert.ToInt32(str.Substring(2), 8);
}
// Otherwise exceeds range
catch (Exception)
{
ThrowInvalidValue(scalar, c_integerTag); // throws
}
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
return true;
}
}
value = default;
return false;
}
private Boolean MatchNull(
Scalar scalar,
out NullToken value)
{
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
switch (scalar.Value ?? String.Empty)
{
case "":
case "null":
case "Null":
case "NULL":
case "~":
value = new NullToken(m_fileId, scalar.Start.Line, scalar.Start.Column);
return true;
}
value = default;
return false;
}
private void ThrowInvalidValue(
Scalar scalar,
String tag)
{
throw new NotSupportedException($"The value '{scalar.Value}' on line {scalar.Start.Line} and column {scalar.Start.Column} is invalid for the type '{tag}'");
}
/// <summary>
/// The maximum number of YAML nodes allowed when parsing a file. A single YAML node may be
/// encountered multiple times due to YAML anchors.
///
/// Note, depth and maximum accumulated bytes are tracked in an outer layer. The goal of this
/// layer is to prevent YAML anchors from causing excessive node traversal.
/// </summary>
private const int c_maxYamlNodes = 50000;
/// <summary>
/// Boolean YAML tag
/// </summary>
private const String c_booleanTag = "tag:yaml.org,2002:bool";
/// <summary>
/// Float YAML tag
/// </summary>
private const String c_floatTag = "tag:yaml.org,2002:float";
/// <summary>
/// Integer YAML tag
/// </summary>
private const String c_integerTag = "tag:yaml.org,2002:int";
/// <summary>
/// Null YAML tag
/// </summary>
private const String c_nullTag = "tag:yaml.org,2002:null";
/// <summary>
/// String YAML tag
/// </summary>
private const String c_stringTag = "tag:yaml.org,2002:str";
/// <summary>
/// File ID
/// </summary>
private readonly Int32? m_fileId;
/// <summary>
/// Parser instance
/// </summary>
private readonly Parser m_parser;
/// <summary>
/// Current parsing event
/// </summary>
private ParsingEvent m_current;
/// <summary>
/// Indicates whether YAML anchors are allowed
/// </summary>
private readonly Boolean m_allowAnchors;
/// <summary>
/// Telemetry data
/// </summary>
private readonly Telemetry m_telemetry;
/// <summary>
/// Number of YAML nodes traversed
/// </summary>
private Int32 m_numNodes;
/// <summary>
/// All encountered parsing events
/// </summary>
private readonly List<ParsingEvent> m_events;
/// <summary>
/// Anchor event index map
/// </summary>
private readonly Dictionary<String, Int32> m_anchors;
/// <summary>
/// Stack of anchor replay states
/// </summary>
private readonly Stack<YamlReplayState> m_replay;
}
}
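
A minimal driving sketch for the reader above; YamlObjectReader is internal and is normally driven by TemplateReader via YamlTemplateLoader, so the direct calls and the sample YAML below are illustrative only.

using System.IO;
using GitHub.Actions.WorkflowParser.Conversion;

// Plain scalars are typed per the YAML 1.2 core schema:
//   "true" / "False"        -> BooleanToken
//   "30", "0x1E", "1.5e3"   -> NumberToken
//   "", "null", "~"         -> NullToken
//   anything else            -> StringToken
using var yaml = new StringReader("timeout-minutes: 30");
var reader = new YamlObjectReader(fileId: null, input: yaml);

reader.ValidateStart();                     // StreamStart + DocumentStart
reader.AllowMappingStart(out var mapping);  // root mapping
reader.AllowLiteral(out var key);           // StringToken "timeout-minutes"
reader.AllowLiteral(out var value);         // NumberToken 30
reader.AllowMappingEnd();
reader.ValidateEnd();                       // DocumentEnd + StreamEnd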

View File

@@ -0,0 +1,73 @@
using System;
using System.Globalization;
using System.IO;
using GitHub.Actions.WorkflowParser.ObjectTemplating;
using YamlDotNet.Core.Events;
namespace GitHub.Actions.WorkflowParser.Conversion
{
/// <summary>
/// Converts a TemplateToken into YAML
/// </summary>
internal sealed class YamlObjectWriter : IObjectWriter
{
internal YamlObjectWriter(StringWriter writer)
{
m_emitter = new YamlDotNet.Core.Emitter(writer);
}
public void WriteString(String value)
{
m_emitter.Emit(new Scalar(value ?? String.Empty));
}
public void WriteBoolean(Boolean value)
{
m_emitter.Emit(new Scalar(value ? "true" : "false"));
}
public void WriteNumber(Double value)
{
m_emitter.Emit(new Scalar(value.ToString("G15", CultureInfo.InvariantCulture)));
}
public void WriteNull()
{
m_emitter.Emit(new Scalar("null"));
}
public void WriteSequenceStart()
{
m_emitter.Emit(new SequenceStart(null, null, true, SequenceStyle.Block));
}
public void WriteSequenceEnd()
{
m_emitter.Emit(new SequenceEnd());
}
public void WriteMappingStart()
{
m_emitter.Emit(new MappingStart());
}
public void WriteMappingEnd()
{
m_emitter.Emit(new MappingEnd());
}
public void WriteStart()
{
m_emitter.Emit(new StreamStart());
m_emitter.Emit(new DocumentStart());
}
public void WriteEnd()
{
m_emitter.Emit(new DocumentEnd(isImplicit: true));
m_emitter.Emit(new StreamEnd());
}
private readonly YamlDotNet.Core.IEmitter m_emitter;
}
}
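
A brief emit sketch for the writer above; it is internal and normally driven during template serialization, so the calls and the expected output are illustrative.

using System.IO;
using GitHub.Actions.WorkflowParser.Conversion;

var buffer = new StringWriter();
var writer = new YamlObjectWriter(buffer);
writer.WriteStart();              // StreamStart + DocumentStart
writer.WriteMappingStart();
writer.WriteString("runs-on");
writer.WriteString("ubuntu-latest");
writer.WriteString("timeout-minutes");
writer.WriteNumber(30);           // formatted with "G15", invariant culture
writer.WriteMappingEnd();
writer.WriteEnd();                // DocumentEnd + StreamEnd
// buffer.ToString() now holds a block-style mapping, roughly:
//   runs-on: ubuntu-latest
//   timeout-minutes: 30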

View File

@@ -0,0 +1,23 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
namespace GitHub.Actions.WorkflowParser.Conversion
{
/// <summary>
/// Index and depth while replaying a YAML anchor
/// </summary>
sealed class YamlReplayState
{
/// <summary>
/// Gets or sets the current node event index that is being replayed.
/// </summary>
public Int32 Index { get; set; }
/// <summary>
/// Gets or sets the depth within the current anchor that is being replayed.
/// When the depth reaches zero, the anchor replay is complete.
/// </summary>
public Int32 Depth { get; set; }
}
}
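
To make the replay state above concrete, a sketch of anchor handling when anchors are allowed; the workflow fragment and variable names are illustrative. The first occurrence of &amp;defaults records its events in m_events/m_anchors; each *defaults alias pushes a YamlReplayState and re-plays those events, adjusting Depth on MappingStart/MappingEnd until the state is popped.

using System.IO;
using GitHub.Actions.WorkflowParser.Conversion;

var yaml =
    "build:\n" +
    "  env: &defaults\n" +
    "    CI: true\n" +
    "test:\n" +
    "  env: *defaults\n";
// With allowAnchors: false (the default), anchors are rejected instead.
var reader = new YamlObjectReader(fileId: null, input: new StringReader(yaml), allowAnchors: true);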

View File

@@ -0,0 +1,124 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using GitHub.Actions.WorkflowParser.ObjectTemplating;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.Conversion
{
/// <summary>
/// Loads a YAML file, and returns the parsed TemplateToken
/// </summary>
internal sealed class YamlTemplateLoader
{
public YamlTemplateLoader(
ParseOptions parseOptions,
IFileProvider fileProvider)
{
m_parseOptions = new ParseOptions(parseOptions);
m_fileProvider = fileProvider ?? throw new ArgumentNullException(nameof(fileProvider));
}
/// <summary>
/// Parses a workflow file into a <see cref="TemplateToken" />.
///
/// Check <see cref="TemplateContext.Errors" /> for errors.
/// </summary>
public TemplateToken ParseWorkflow(
TemplateContext context,
String path)
{
var result = default(TemplateToken);
try
{
result = LoadFile(context, path, WorkflowTemplateConstants.WorkflowRoot);
}
catch (Exception ex)
{
context.Errors.Add(ex);
}
return result;
}
private TemplateToken LoadFile(
TemplateContext context,
String path,
String templateType)
{
if (context.Errors.Count > 0)
{
throw new InvalidOperationException("Expected error count to be 0 when attempting to load a new file");
}
// Is entry file?
var isEntryFile = m_referencedFiles.Count == 0;
// Root the path
path = m_fileProvider.ResolvePath(null, path);
// Validate max files
m_referencedFiles.Add(path);
if (m_parseOptions.MaxFiles > 0 && m_referencedFiles.Count > m_parseOptions.MaxFiles)
{
throw new InvalidOperationException($"The maximum file count of {m_parseOptions.MaxFiles} has been exceeded");
}
// Get the file ID
var fileId = context.GetFileId(path);
// Check the cache
if (!m_cache.TryGetValue(path, out String fileContent))
{
// Fetch the file
context.CancellationToken.ThrowIfCancellationRequested();
fileContent = m_fileProvider.GetFileContent(path);
// Validate max file size
if (fileContent.Length > m_parseOptions.MaxFileSize)
{
throw new InvalidOperationException($"{path}: The maximum file size of {m_parseOptions.MaxFileSize} characters has been exceeded");
}
// Cache
m_cache[path] = fileContent;
}
// Deserialize
var token = default(TemplateToken);
using (var stringReader = new StringReader(fileContent))
{
var yamlObjectReader = new YamlObjectReader(fileId, stringReader, m_parseOptions.AllowAnchors, context.Telemetry);
token = TemplateReader.Read(context, templateType, yamlObjectReader, fileId, out _);
}
// Trace
if (!isEntryFile)
{
context.TraceWriter.Info(String.Empty);
}
context.TraceWriter.Info("# ");
context.TraceWriter.Info("# {0}", path);
context.TraceWriter.Info("# ");
return token;
}
/// <summary>
/// Cache of file content
/// </summary>
private readonly Dictionary<String, String> m_cache = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
private readonly IFileProvider m_fileProvider;
private readonly ParseOptions m_parseOptions;
/// <summary>
/// Tracks unique file references
/// </summary>
private readonly HashSet<String> m_referencedFiles = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
}
}

View File

@@ -0,0 +1,23 @@
#nullable disable // Temporary: should be removed and issues fixed manually
using System;
using System.Collections.Generic;
using System.Linq;
namespace GitHub.Actions.WorkflowParser
{
internal static class EnumerableExtensions
{
/// <summary>
/// Creates a HashSet with equality comparer <paramref name="comparer"/> based on the elements
/// in <paramref name="source"/>, using transformation function <paramref name="selector"/>.
/// </summary>
public static HashSet<TOut> ToHashSet<TIn, TOut>(
this IEnumerable<TIn> source,
Func<TIn, TOut> selector,
IEqualityComparer<TOut> comparer)
{
return new HashSet<TOut>(source.Select(selector), comparer);
}
}
}
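
A short usage sketch of the extension above (it is internal to this assembly); the job list source is hypothetical:

using System;
using System.Collections.Generic;

IEnumerable<Job> jobs = GetParsedJobs();  // hypothetical helper returning parsed jobs
// Collect distinct job ids, compared case-insensitively.
HashSet<string> jobIds = jobs.ToHashSet(j => j.Id?.Value ?? string.Empty, StringComparer.OrdinalIgnoreCase);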

View File

@@ -0,0 +1,52 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System.ComponentModel;
using System.Runtime.Serialization;
using System.Text.Json.Serialization;
namespace GitHub.Actions.WorkflowParser
{
// Actions service should not use this class at all.
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class FileInfo
{
[JsonConstructor]
public FileInfo()
{
}
private FileInfo(FileInfo infoToClone)
{
this.Path = infoToClone.Path;
this.NWO = infoToClone.NWO;
this.ResolvedRef = infoToClone.ResolvedRef;
this.ResolvedSha = infoToClone.ResolvedSha;
this.IsTrusted = infoToClone.IsTrusted;
this.IsRequired = infoToClone.IsRequired;
}
[DataMember(Name = "path", EmitDefaultValue = false)]
public string Path { get; set; }
[DataMember(Name = "nwo", EmitDefaultValue = false)]
public string NWO { get; set; }
[DataMember(Name = "resolved_ref", EmitDefaultValue = false)]
public string ResolvedRef { get; set; }
[DataMember(Name = "resolved_sha", EmitDefaultValue = false)]
public string ResolvedSha { get; set; }
[DataMember(Name = "is_trusted", EmitDefaultValue = false)]
public bool IsTrusted { get; set; }
[DataMember(Name = "is_required", EmitDefaultValue = false)]
public bool IsRequired { get; set; }
public FileInfo Clone()
{
return new FileInfo(this);
}
}
}

View File

@@ -0,0 +1,21 @@
using System.Runtime.Serialization;
namespace GitHub.Actions.WorkflowParser
{
/// <summary>
/// Information about a concurrency setting parsed from YML
/// </summary>
[DataContract]
public sealed class GroupPermitSetting
{
public GroupPermitSetting(string group)
{
Group = group;
}
[DataMember]
public string Group { get; set; }
[DataMember]
public bool CancelInProgress { get; set; }
}
}

View File

@@ -0,0 +1,11 @@
using System;
namespace GitHub.Actions.WorkflowParser
{
public interface IFileProvider
{
String GetFileContent(String path);
String ResolvePath(String defaultRoot, String path);
}
}
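
A minimal in-memory implementation sketch of the interface above, e.g. for tests; the dictionary-backed storage and pass-through path resolution are assumptions, not part of this commit. YamlTemplateLoader resolves and loads workflow files through this interface.

using System;
using System.Collections.Generic;

public sealed class InMemoryFileProvider : IFileProvider
{
    private readonly Dictionary<String, String> m_files =
        new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase);

    public InMemoryFileProvider Add(String path, String content)
    {
        m_files[path] = content;
        return this;
    }

    public String GetFileContent(String path)
    {
        // Throws when the path was never registered, mirroring a missing file.
        return m_files.TryGetValue(path, out var content)
            ? content
            : throw new System.IO.FileNotFoundException(path);
    }

    public String ResolvePath(String defaultRoot, String path)
    {
        // No rooting logic needed for the in-memory case; return the path unchanged.
        return path;
    }
}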

View File

@@ -0,0 +1,37 @@
#nullable enable
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
using Newtonsoft.Json;
namespace GitHub.Actions.WorkflowParser
{
[JsonConverter(typeof(IJobJsonConverter))]
public interface IJob
{
JobType Type
{
get;
}
StringToken? Id
{
get;
set;
}
IList<StringToken> Needs
{
get;
}
public Permissions? Permissions
{
get;
set;
}
IJob Clone(bool omitSource);
}
}

View File

@@ -0,0 +1,94 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Reflection;
using System.Runtime.Serialization;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace GitHub.Actions.WorkflowParser
{
internal sealed class IJobJsonConverter : JsonConverter
{
public override Boolean CanWrite
{
get
{
return false;
}
}
public override Boolean CanConvert(Type objectType)
{
return typeof(IJob).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo());
}
public override Object ReadJson(
JsonReader reader,
Type objectType,
Object existingValue,
JsonSerializer serializer)
{
if (reader.TokenType != JsonToken.StartObject)
{
return null;
}
JobType? jobType = null;
JObject value = JObject.Load(reader);
if (!value.TryGetValue("Type", StringComparison.OrdinalIgnoreCase, out JToken typeValue))
{
return existingValue;
}
else
{
if (typeValue.Type == JTokenType.Integer)
{
jobType = (JobType)(int)typeValue;
}
else if (typeValue.Type == JTokenType.String)
{
JobType parsedType;
if (Enum.TryParse((String)typeValue, ignoreCase: true, result: out parsedType))
{
jobType = parsedType;
}
}
}
if (jobType == null)
{
return existingValue;
}
Object newValue = null;
switch (jobType)
{
case JobType.Job:
newValue = new Job();
break;
case JobType.ReusableWorkflowJob:
newValue = new ReusableWorkflowJob();
break;
}
if (value != null)
{
using JsonReader objectReader = value.CreateReader();
serializer.Populate(objectReader, newValue);
}
return newValue;
}
public override void WriteJson(
JsonWriter writer,
Object value,
JsonSerializer serializer)
{
throw new NotImplementedException();
}
}
}
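
For context, the JSON shapes the converter above distinguishes; the property values are illustrative:

// "Type" may be the enum name (case-insensitive) or its numeric value:
//   { "Type": "Job", ... }                   -> populated into a new Job
//   { "Type": "ReusableWorkflowJob", ... }   -> populated into a new ReusableWorkflowJob
//   { "Type": 0, ... }                       -> Job (JobType.Job = 0)
// A missing or unrecognized "Type" returns existingValue unchanged.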

View File

@@ -0,0 +1,12 @@
using System;
namespace GitHub.Actions.WorkflowParser
{
public interface IServerTraceWriter
{
void TraceAlways(
Int32 tracepoint,
String format,
params Object[] arguments);
}
}

View File

@@ -0,0 +1,18 @@
#nullable enable
using Newtonsoft.Json;
namespace GitHub.Actions.WorkflowParser
{
[JsonConverter(typeof(IStepJsonConverter))]
public interface IStep
{
string? Id
{
get;
set;
}
IStep Clone(bool omitSource);
}
}

View File

@@ -0,0 +1,70 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Reflection;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace GitHub.Actions.WorkflowParser
{
internal sealed class IStepJsonConverter : JsonConverter
{
public override Boolean CanWrite
{
get
{
return false;
}
}
public override Boolean CanConvert(Type objectType)
{
return typeof(IStep).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo());
}
public override Object ReadJson(
JsonReader reader,
Type objectType,
Object existingValue,
JsonSerializer serializer)
{
if (reader.TokenType != JsonToken.StartObject)
{
return null;
}
JObject value = JObject.Load(reader);
Object newValue = null;
if (value.TryGetValue("Uses", StringComparison.OrdinalIgnoreCase, out _))
{
newValue = new ActionStep();
}
else if (value.TryGetValue("Run", StringComparison.OrdinalIgnoreCase, out _))
{
newValue = new RunStep();
}
else
{
return existingValue;
}
if (value != null)
{
using JsonReader objectReader = value.CreateReader();
serializer.Populate(objectReader, newValue);
}
return newValue;
}
public override void WriteJson(
JsonWriter writer,
Object value,
JsonSerializer serializer)
{
throw new NotImplementedException();
}
}
}
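
The step converter above infers the concrete type from the shape of the object rather than an explicit discriminator; the values below are illustrative:

//   { "Uses": { ... }, ... }                 -> populated into a new ActionStep
//   { "Run": { ... }, ... }                  -> populated into a new RunStep
//   neither "Uses" nor "Run" present         -> existingValue is returned unchanged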

View File

@@ -0,0 +1,213 @@
#nullable enable
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Serialization;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser
{
[DataContract]
public sealed class Job : IJob
{
[DataMember(Order = 0, Name = "type", EmitDefaultValue = true)]
public JobType Type
{
get
{
return JobType.Job;
}
}
[DataMember(Order = 1, Name = "id", EmitDefaultValue = false)]
public StringToken? Id
{
get;
set;
}
/// <summary>
/// Gets or sets the display name
/// </summary>
[DataMember(Order = 2, Name = "name", EmitDefaultValue = false)]
public ScalarToken? Name
{
get;
set;
}
public IList<StringToken> Needs
{
get
{
if (m_needs == null)
{
m_needs = new List<StringToken>();
}
return m_needs;
}
}
[DataMember(Order = 3, Name = "needs", EmitDefaultValue = false)]
private List<StringToken>? m_needs;
[DataMember(Order = 4, Name = "if", EmitDefaultValue = false)]
public BasicExpressionToken? If
{
get;
set;
}
[DataMember(Order = 5, Name = "strategy", EmitDefaultValue = false)]
public TemplateToken? Strategy
{
get;
set;
}
[DataMember(Order = 6, Name = "continue-on-error", EmitDefaultValue = false)]
public ScalarToken? ContinueOnError
{
get;
set;
}
[DataMember(Order = 7, Name = "timeout-minutes", EmitDefaultValue = false)]
public ScalarToken? TimeoutMinutes
{
get;
set;
}
[DataMember(Order = 8, Name = "cancel-timeout-minutes", EmitDefaultValue = false)]
public ScalarToken? CancelTimeoutMinutes
{
get;
set;
}
[DataMember(Order = 9, Name = "concurrency", EmitDefaultValue = false)]
public TemplateToken? Concurrency
{
get;
set;
}
[DataMember(Order = 10, Name = "permissions", EmitDefaultValue = false)]
public Permissions? Permissions
{
get;
set;
}
[DataMember(Order = 11, Name = "env", EmitDefaultValue = false)]
public TemplateToken? Env
{
get;
set;
}
[DataMember(Order = 12, Name = "environment", EmitDefaultValue = false)]
public TemplateToken? Environment
{
get;
set;
}
[DataMember(Order = 13, Name = "defaults", EmitDefaultValue = false)]
public TemplateToken? Defaults
{
get;
set;
}
[DataMember(Order = 14, Name = "runs-on", EmitDefaultValue = false)]
public TemplateToken? RunsOn
{
get;
set;
}
[DataMember(Order = 15, Name = "container", EmitDefaultValue = false)]
public TemplateToken? Container
{
get;
set;
}
[DataMember(Order = 16, Name = "services", EmitDefaultValue = false)]
public TemplateToken? Services
{
get;
set;
}
[DataMember(Order = 17, Name = "outputs", EmitDefaultValue = false)]
public TemplateToken? Outputs
{
get;
set;
}
public IList<IStep> Steps
{
get
{
if (m_steps == null)
{
m_steps = new List<IStep>();
}
return m_steps;
}
}
[DataMember(Order = 18, Name = "steps", EmitDefaultValue = false)]
private List<IStep>? m_steps;
[DataMember(Order = 19, Name = "snapshot", EmitDefaultValue = false)]
public TemplateToken? Snapshot
{
get;
set;
}
public IJob Clone(bool omitSource)
{
var result = new Job
{
CancelTimeoutMinutes = CancelTimeoutMinutes?.Clone(omitSource) as ScalarToken,
Concurrency = Concurrency?.Clone(omitSource),
Container = Container?.Clone(omitSource),
ContinueOnError = ContinueOnError?.Clone(omitSource) as ScalarToken,
Defaults = Defaults?.Clone(omitSource),
Env = Env?.Clone(omitSource),
Environment = Environment?.Clone(omitSource),
Id = Id?.Clone(omitSource) as StringToken,
If = If?.Clone(omitSource) as BasicExpressionToken,
Name = Name?.Clone(omitSource) as ScalarToken,
Outputs = Outputs?.Clone(omitSource),
Permissions = Permissions?.Clone(),
RunsOn = RunsOn?.Clone(omitSource),
Services = Services?.Clone(omitSource),
Strategy = Strategy?.Clone(omitSource),
TimeoutMinutes = TimeoutMinutes?.Clone(omitSource) as ScalarToken,
Snapshot = Snapshot?.Clone(omitSource),
};
result.Needs.AddRange(Needs.Select(x => (x.Clone(omitSource) as StringToken)!));
result.Steps.AddRange(Steps.Select(x => x.Clone(omitSource)));
return result;
}
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_needs?.Count == 0)
{
m_needs = null;
}
if (m_steps?.Count == 0)
{
m_steps = null;
}
}
}
}

View File

@@ -0,0 +1,86 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
namespace GitHub.Actions.WorkflowParser
{
public sealed class JobContainer
{
/// <summary>
/// Gets or sets the environment which is provided to the container.
/// </summary>
public IDictionary<String, String> Environment
{
get;
set;
}
/// <summary>
/// Gets or sets the container image name.
/// </summary>
public String Image
{
get;
set;
}
/// <summary>
/// Gets or sets the options used for the container instance.
/// </summary>
public String Options
{
get;
set;
}
/// <summary>
/// Gets or sets the volumes which are mounted into the container.
/// </summary>
public IList<String> Volumes
{
get;
set;
}
/// <summary>
/// Gets or sets the ports which are exposed on the container.
/// </summary>
public IList<String> Ports
{
get;
set;
}
/// <summary>
/// Gets or sets the credentials used for pulling the container image.
/// </summary>
public ContainerRegistryCredentials Credentials
{
get;
set;
}
}
public sealed class ContainerRegistryCredentials
{
/// <summary>
/// Gets or sets the user to authenticate to a registry with
/// </summary>
public String Username
{
get;
set;
}
/// <summary>
/// Gets or sets the password to authenticate to a registry with
/// </summary>
public String Password
{
get;
set;
}
}
}

View File

@@ -0,0 +1,7 @@
namespace GitHub.Actions.WorkflowParser;
public enum JobType
{
Job,
ReusableWorkflowJob,
}

View File

@@ -0,0 +1,21 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using GitHub.Actions.Expressions.Sdk;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
/// <summary>
/// This expression node retrieves a user-defined named-value. This is used during expression evaluation.
/// </summary>
internal sealed class ContextValueNode : NamedValue
{
protected override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
{
resultMemory = null;
return (context.State as TemplateContext).ExpressionValues[Name];
}
}
}

View File

@@ -0,0 +1,25 @@
using System;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
internal class EmptyTraceWriter : ITraceWriter
{
public void Error(
String format,
params Object[] args)
{
}
public void Info(
String format,
params Object[] args)
{
}
public void Verbose(
String format,
params Object[] args)
{
}
}
}

View File

@@ -0,0 +1,27 @@
using System;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
/// <summary>
/// Wraps an ITraceWriter so it can be passed for expression evaluation.
/// </summary>
internal sealed class ExpressionTraceWriter : GitHub.Actions.Expressions.ITraceWriter
{
public ExpressionTraceWriter(ITraceWriter trace)
{
m_trace = trace;
}
public void Info(String message)
{
m_trace.Info("{0}", message);
}
public void Verbose(String message)
{
m_trace.Verbose("{0}", message);
}
private readonly ITraceWriter m_trace;
}
}

View File

@@ -0,0 +1,26 @@
using System;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
/// <summary>
/// Interface for reading a source object (or file).
/// This interface is used by TemplateReader to build a TemplateToken DOM.
/// </summary>
internal interface IObjectReader
{
Boolean AllowLiteral(out LiteralToken token);
Boolean AllowSequenceStart(out SequenceToken token);
Boolean AllowSequenceEnd();
Boolean AllowMappingStart(out MappingToken token);
Boolean AllowMappingEnd();
void ValidateStart();
void ValidateEnd();
}
}

View File

@@ -0,0 +1,31 @@
using System;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
/// <summary>
/// Interface for building an object. This interface is used by
/// TemplateWriter to convert a TemplateToken DOM to another format.
/// </summary>
internal interface IObjectWriter
{
void WriteNull();
void WriteBoolean(Boolean value);
void WriteNumber(Double value);
void WriteString(String value);
void WriteSequenceStart();
void WriteSequenceEnd();
void WriteMappingStart();
void WriteMappingEnd();
void WriteStart();
void WriteEnd();
}
}

View File

@@ -0,0 +1,19 @@
using System;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
public interface ITraceWriter
{
void Error(
String format,
params Object[] args);
void Info(
String format,
params Object[] args);
void Verbose(
String format,
params Object[] args);
}
}

View File

@@ -0,0 +1,10 @@
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
internal static class ITraceWriterExtensions
{
internal static GitHub.Actions.Expressions.ITraceWriter ToExpressionTraceWriter(this ITraceWriter trace)
{
return new ExpressionTraceWriter(trace);
}
}
}

View File

@@ -0,0 +1,57 @@
using System;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Schema
{
internal sealed class BooleanDefinition : ScalarDefinition
{
internal BooleanDefinition()
{
}
internal BooleanDefinition(MappingToken definition)
: base(definition)
{
foreach (var definitionPair in definition)
{
var definitionKey = definitionPair.Key.AssertString($"{TemplateConstants.Definition} key");
switch (definitionKey.Value)
{
case TemplateConstants.Boolean:
var mapping = definitionPair.Value.AssertMapping($"{TemplateConstants.Definition} {TemplateConstants.Boolean}");
foreach (var mappingPair in mapping)
{
var mappingKey = mappingPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Boolean} key");
switch (mappingKey.Value)
{
default:
mappingKey.AssertUnexpectedValue($"{TemplateConstants.Definition} {TemplateConstants.Boolean} key");
break;
}
}
break;
case TemplateConstants.CoerceRaw:
continue;
default:
definitionKey.AssertUnexpectedValue($"{TemplateConstants.Definition} key");
break;
}
}
}
internal override DefinitionType DefinitionType => DefinitionType.Boolean;
internal override Boolean IsMatch(LiteralToken literal)
{
return literal is BooleanToken;
}
internal override void Validate(
TemplateSchema schema,
String name)
{
}
}
}

View File

@@ -0,0 +1,77 @@
using System;
using System.Collections.Generic;
using System.Linq;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Schema
{
/// <summary>
/// Defines the allowable schema for a user defined type
/// </summary>
internal abstract class Definition
{
protected Definition()
{
}
protected Definition(MappingToken definition)
{
for (var i = 0; i < definition.Count; )
{
var definitionKey = definition[i].Key.AssertString($"{TemplateConstants.Definition} key");
if (String.Equals(definitionKey.Value, TemplateConstants.Context, StringComparison.Ordinal))
{
var context = definition[i].Value.AssertSequence($"{TemplateConstants.Context}");
definition.RemoveAt(i);
var readerContext = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
var evaluatorContext = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
foreach (TemplateToken item in context)
{
var itemStr = item.AssertString($"{TemplateConstants.Context} item").Value;
readerContext.Add(itemStr);
// Remove min/max parameter info
var paramIndex = itemStr.IndexOf('(');
if (paramIndex > 0)
{
evaluatorContext.Add(String.Concat(itemStr.Substring(0, paramIndex + 1), ")"));
}
else
{
evaluatorContext.Add(itemStr);
}
}
ReaderContext = readerContext.ToArray();
EvaluatorContext = evaluatorContext.ToArray();
}
else if (String.Equals(definitionKey.Value, TemplateConstants.Description, StringComparison.Ordinal))
{
definition.RemoveAt(i);
}
else
{
i++;
}
}
}
internal abstract DefinitionType DefinitionType { get; }
/// <summary>
/// Used by the template reader to determine allowed expression values and functions.
/// Also used by the template reader to validate function min/max parameters.
/// </summary>
internal String[] ReaderContext { get; private set; } = new String[0];
/// <summary>
/// Used by the template evaluator to determine allowed expression values and functions.
/// The min/max parameter info is omitted.
/// </summary>
internal String[] EvaluatorContext { get; private set; } = new String[0];
internal abstract void Validate(
TemplateSchema schema,
String name);
}
}
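
A small illustration of how the constructor above splits a definition's context entries; the entries themselves are illustrative:

// context:
//   - github
//   - needs
//   - fromJSON(1,1)
//
// ReaderContext    => [ "github", "needs", "fromJSON(1,1)" ]   (min/max parameter info kept)
// EvaluatorContext => [ "github", "needs", "fromJSON()" ]      (parameter info stripped)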

View File

@@ -0,0 +1,16 @@
using System;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Schema
{
internal enum DefinitionType
{
Null,
Boolean,
Number,
String,
Sequence,
Mapping,
OneOf,
AllowedValues,
}
}

View File

@@ -0,0 +1,111 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Schema
{
internal sealed class MappingDefinition : Definition
{
internal MappingDefinition()
{
}
internal MappingDefinition(MappingToken definition)
: base(definition)
{
foreach (var definitionPair in definition)
{
var definitionKey = definitionPair.Key.AssertString($"{TemplateConstants.Definition} key");
switch (definitionKey.Value)
{
case TemplateConstants.Mapping:
var mapping = definitionPair.Value.AssertMapping($"{TemplateConstants.Definition} {TemplateConstants.Mapping}");
foreach (var mappingPair in mapping)
{
var mappingKey = mappingPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Mapping} key");
switch (mappingKey.Value)
{
case TemplateConstants.Properties:
var properties = mappingPair.Value.AssertMapping($"{TemplateConstants.Definition} {TemplateConstants.Mapping} {TemplateConstants.Properties}");
foreach (var propertiesPair in properties)
{
var propertyName = propertiesPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Mapping} {TemplateConstants.Properties} key");
Properties.Add(propertyName.Value, new PropertyDefinition(propertiesPair.Value));
}
break;
case TemplateConstants.LooseKeyType:
var looseKeyType = mappingPair.Value.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Mapping} {TemplateConstants.LooseKeyType}");
LooseKeyType = looseKeyType.Value;
break;
case TemplateConstants.LooseValueType:
var looseValueType = mappingPair.Value.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Mapping} {TemplateConstants.LooseValueType}");
LooseValueType = looseValueType.Value;
break;
default:
mappingKey.AssertUnexpectedValue($"{TemplateConstants.Definition} {TemplateConstants.Mapping} key");
break;
}
}
break;
case TemplateConstants.CoerceRaw:
continue;
default:
definitionKey.AssertUnexpectedValue($"{TemplateConstants.Definition} key");
break;
}
}
}
internal override DefinitionType DefinitionType => DefinitionType.Mapping;
internal String LooseKeyType { get; set; }
internal String LooseValueType { get; set; }
internal Dictionary<String, PropertyDefinition> Properties { get; } = new Dictionary<String, PropertyDefinition>(StringComparer.Ordinal);
internal override void Validate(
TemplateSchema schema,
String name)
{
// Lookup loose key type
if (!String.IsNullOrEmpty(LooseKeyType))
{
schema.GetDefinition(LooseKeyType);
// Lookup loose value type
if (!String.IsNullOrEmpty(LooseValueType))
{
schema.GetDefinition(LooseValueType);
}
else
{
throw new ArgumentException($"Property '{TemplateConstants.LooseKeyType}' is defined but '{TemplateConstants.LooseValueType}' is not defined on '{name}'");
}
}
// Otherwise validate loose value type not be defined
else if (!String.IsNullOrEmpty(LooseValueType))
{
throw new ArgumentException($"Property '{TemplateConstants.LooseValueType}' is defined but '{TemplateConstants.LooseKeyType}' is not defined");
}
// Lookup each property
foreach (var property in Properties)
{
if (String.IsNullOrEmpty(property.Value.Type))
{
throw new ArgumentException($"Type not specified for the '{property.Key}' property on the '{name}' type");
}
schema.GetDefinition(property.Value.Type);
}
}
}
}

View File

@@ -0,0 +1,58 @@
using System;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Schema
{
internal sealed class NullDefinition : ScalarDefinition
{
internal NullDefinition()
{
}
internal NullDefinition(MappingToken definition)
: base(definition)
{
foreach (var definitionPair in definition)
{
var definitionKey = definitionPair.Key.AssertString($"{TemplateConstants.Definition} key");
switch (definitionKey.Value)
{
case TemplateConstants.Null:
var mapping = definitionPair.Value.AssertMapping($"{TemplateConstants.Definition} {TemplateConstants.Null}");
foreach (var mappingPair in mapping)
{
var mappingKey = mappingPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Null} key");
switch (mappingKey.Value)
{
default:
mappingKey.AssertUnexpectedValue($"{TemplateConstants.Definition} {TemplateConstants.Null} key");
break;
}
}
break;
case TemplateConstants.AllowedValues:
case TemplateConstants.CoerceRaw:
continue;
default:
definitionKey.AssertUnexpectedValue($"{TemplateConstants.Definition} key");
break;
}
}
}
internal override DefinitionType DefinitionType => DefinitionType.Null;
internal override Boolean IsMatch(LiteralToken literal)
{
return literal is NullToken;
}
internal override void Validate(
TemplateSchema schema,
String name)
{
}
}
}

View File

@@ -0,0 +1,57 @@
using System;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Schema
{
internal sealed class NumberDefinition : ScalarDefinition
{
internal NumberDefinition()
{
}
internal NumberDefinition(MappingToken definition)
: base(definition)
{
foreach (var definitionPair in definition)
{
var definitionKey = definitionPair.Key.AssertString($"{TemplateConstants.Definition} key");
switch (definitionKey.Value)
{
case TemplateConstants.Number:
var mapping = definitionPair.Value.AssertMapping($"{TemplateConstants.Definition} {TemplateConstants.Number}");
foreach (var mappingPair in mapping)
{
var mappingKey = mappingPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Number} key");
switch (mappingKey.Value)
{
default:
mappingKey.AssertUnexpectedValue($"{TemplateConstants.Definition} {TemplateConstants.Number} key");
break;
}
}
break;
case TemplateConstants.CoerceRaw:
continue;
default:
definitionKey.AssertUnexpectedValue($"{TemplateConstants.Definition} key");
break;
}
}
}
internal override DefinitionType DefinitionType => DefinitionType.Number;
internal override Boolean IsMatch(LiteralToken literal)
{
return literal is NumberToken;
}
internal override void Validate(
TemplateSchema schema,
String name)
{
}
}
}

View File

@@ -0,0 +1,191 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Linq;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Schema
{
/// <summary>
/// Must resolve to exactly one of the referenced definitions
/// </summary>
internal sealed class OneOfDefinition : Definition
{
internal OneOfDefinition()
{
}
internal OneOfDefinition(MappingToken definition)
: base(definition)
{
foreach (var definitionPair in definition)
{
var definitionKey = definitionPair.Key.AssertString($"{TemplateConstants.Definition} key");
switch (definitionKey.Value)
{
case TemplateConstants.OneOf:
var oneOf = definitionPair.Value.AssertSequence(TemplateConstants.OneOf);
foreach (var oneOfItem in oneOf)
{
var reference = oneOfItem.AssertString(TemplateConstants.OneOf);
OneOf.Add(reference.Value);
}
break;
case TemplateConstants.CoerceRaw:
continue;
default:
definitionKey.AssertUnexpectedValue($"{TemplateConstants.Definition} key");
break;
}
}
}
internal override DefinitionType DefinitionType => DefinitionType.OneOf;
internal List<String> OneOf { get; } = new List<String>();
internal override void Validate(
TemplateSchema schema,
String name)
{
if (OneOf.Count == 0)
{
throw new ArgumentException($"'{name}' does not contain any references");
}
var foundLooseKeyType = false;
var mappingDefinitions = default(List<MappingDefinition>);
var sequenceDefinition = default(SequenceDefinition);
var nullDefinition = default(NullDefinition);
var booleanDefinition = default(BooleanDefinition);
var numberDefinition = default(NumberDefinition);
var stringDefinitions = default(List<StringDefinition>);
foreach (var nestedType in OneOf)
{
var nestedDefinition = schema.GetDefinition(nestedType);
if (nestedDefinition.ReaderContext.Length > 0)
{
throw new ArgumentException($"'{name}' is a one-of definition and references another definition that defines context. This is currently not supported.");
}
if (nestedDefinition is MappingDefinition mappingDefinition)
{
if (mappingDefinitions == null)
{
mappingDefinitions = new List<MappingDefinition>();
}
mappingDefinitions.Add(mappingDefinition);
if (!String.IsNullOrEmpty(mappingDefinition.LooseKeyType))
{
foundLooseKeyType = true;
}
}
else if (nestedDefinition is SequenceDefinition s)
{
// Multiple sequence definitions not allowed
if (sequenceDefinition != null)
{
throw new ArgumentException($"'{name}' refers to more than one '{TemplateConstants.Sequence}'");
}
sequenceDefinition = s;
}
else if (nestedDefinition is NullDefinition n)
{
// Multiple null definitions not allowed
if (nullDefinition != null)
{
throw new ArgumentException($"'{name}' refers to more than one '{TemplateConstants.Null}'");
}
nullDefinition = n;
}
else if (nestedDefinition is BooleanDefinition b)
{
// Multiple boolean definitions not allowed
if (booleanDefinition != null)
{
throw new ArgumentException($"'{name}' refers to more than one '{TemplateConstants.Boolean}'");
}
booleanDefinition = b;
}
else if (nestedDefinition is NumberDefinition num)
{
// Multiple number definitions not allowed
if (numberDefinition != null)
{
throw new ArgumentException($"'{name}' refers to more than one '{TemplateConstants.Number}'");
}
numberDefinition = num;
}
else if (nestedDefinition is StringDefinition stringDefinition)
{
// First string definition
if (stringDefinitions == null)
{
stringDefinitions = new List<StringDefinition>();
}
// Multiple string definitions, all must be 'Constant'
else if ((stringDefinitions.Count == 1 && String.IsNullOrEmpty(stringDefinitions[0].Constant))
|| String.IsNullOrEmpty(stringDefinition.Constant))
{
throw new ArgumentException($"'{name}' refers to more than one '{TemplateConstants.Scalar}', but some do not set '{TemplateConstants.Constant}'");
}
stringDefinitions.Add(stringDefinition);
}
else if (nestedDefinition is OneOfDefinition oneOfDefinition)
{
// Allow one-of to reference another one-of
}
else
{
throw new ArgumentException($"'{name}' refers to a '{nestedDefinition.DefinitionType}' definition");
}
}
if (mappingDefinitions?.Count > 1)
{
if (foundLooseKeyType)
{
throw new ArgumentException($"'{name}' refers to two mappings that both set '{TemplateConstants.LooseKeyType}'");
}
var seenProperties = new Dictionary<String, PropertyDefinition>(StringComparer.Ordinal);
foreach (var mappingDefinition in mappingDefinitions)
{
foreach (var newProperty in mappingDefinition.Properties)
{
// Already seen
if (seenProperties.TryGetValue(newProperty.Key, out PropertyDefinition existingProperty))
{
// Types match
if (String.Equals(existingProperty.Type, newProperty.Value.Type, StringComparison.Ordinal))
{
continue;
}
// Collision
throw new ArgumentException($"'{name}' contains two mappings with the same property, but each refers to a different type. All matching properties must refer to the same type.");
}
// New
else
{
seenProperties.Add(newProperty.Key, newProperty.Value);
}
}
}
}
}
}
}

View File

@@ -0,0 +1,47 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Schema
{
internal sealed class PropertyDefinition
{
internal PropertyDefinition(TemplateToken token)
{
if (token is StringToken stringToken)
{
Type = stringToken.Value;
}
else
{
var mapping = token.AssertMapping($"{TemplateConstants.MappingPropertyValue}");
foreach (var mappingPair in mapping)
{
var mappingKey = mappingPair.Key.AssertString($"{TemplateConstants.MappingPropertyValue} key");
switch (mappingKey.Value)
{
case TemplateConstants.Type:
Type = mappingPair.Value.AssertString($"{TemplateConstants.MappingPropertyValue} {TemplateConstants.Type}").Value;
break;
case TemplateConstants.Required:
Required = mappingPair.Value.AssertBoolean($"{TemplateConstants.MappingPropertyValue} {TemplateConstants.Required}").Value;
break;
case TemplateConstants.Description:
Description = mappingPair.Value.AssertString($"{TemplateConstants.MappingPropertyValue} {TemplateConstants.Description}").Value;
break;
default:
mappingKey.AssertUnexpectedValue($"{TemplateConstants.MappingPropertyValue} key");
break;
}
}
}
}
internal String Type { get; set; }
internal Boolean Required { get; set; }
internal String Description { get; set; }
}
}

View File

@@ -0,0 +1,19 @@
using System;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Schema
{
internal abstract class ScalarDefinition : Definition
{
internal ScalarDefinition()
{
}
internal ScalarDefinition(MappingToken definition)
: base(definition)
{
}
internal abstract Boolean IsMatch(LiteralToken literal);
}
}

View File

@@ -0,0 +1,69 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Schema
{
internal sealed class SequenceDefinition : Definition
{
internal SequenceDefinition()
{
}
internal SequenceDefinition(MappingToken definition)
: base(definition)
{
foreach (var definitionPair in definition)
{
var definitionKey = definitionPair.Key.AssertString($"{TemplateConstants.Definition} key");
switch (definitionKey.Value)
{
case TemplateConstants.Sequence:
var mapping = definitionPair.Value.AssertMapping($"{TemplateConstants.Definition} {TemplateConstants.Sequence}");
foreach (var mappingPair in mapping)
{
var mappingKey = mappingPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Sequence} key");
switch (mappingKey.Value)
{
case TemplateConstants.ItemType:
var itemType = mappingPair.Value.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Sequence} {TemplateConstants.ItemType}");
ItemType = itemType.Value;
break;
default:
mappingKey.AssertUnexpectedValue($"{TemplateConstants.Definition} {TemplateConstants.Sequence} key");
break;
}
}
break;
case TemplateConstants.CoerceRaw:
continue;
default:
definitionKey.AssertUnexpectedValue($"{TemplateConstants.Definition} key");
break;
}
}
}
internal override DefinitionType DefinitionType => DefinitionType.Sequence;
internal String ItemType { get; set; }
internal override void Validate(
TemplateSchema schema,
String name)
{
if (String.IsNullOrEmpty(ItemType))
{
throw new ArgumentException($"'{name}' does not define '{TemplateConstants.ItemType}'");
}
// Lookup item type
schema.GetDefinition(ItemType);
}
}
}

View File

@@ -0,0 +1,116 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Schema
{
internal sealed class StringDefinition : ScalarDefinition
{
internal StringDefinition()
{
}
internal StringDefinition(MappingToken definition)
: base(definition)
{
foreach (var definitionPair in definition)
{
var definitionKey = definitionPair.Key.AssertString($"{TemplateConstants.Definition} key");
switch (definitionKey.Value)
{
case TemplateConstants.String:
var mapping = definitionPair.Value.AssertMapping($"{TemplateConstants.Definition} {TemplateConstants.String}");
foreach (var mappingPair in mapping)
{
var mappingKey = mappingPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.String} key");
switch (mappingKey.Value)
{
case TemplateConstants.Constant:
var constantStringToken = mappingPair.Value.AssertString($"{TemplateConstants.Definition} {TemplateConstants.String} {TemplateConstants.Constant}");
Constant = constantStringToken.Value;
break;
case TemplateConstants.IgnoreCase:
var ignoreCaseBooleanToken = mappingPair.Value.AssertBoolean($"{TemplateConstants.Definition} {TemplateConstants.String} {TemplateConstants.IgnoreCase}");
IgnoreCase = ignoreCaseBooleanToken.Value;
break;
case TemplateConstants.RequireNonEmpty:
var requireNonEmptyBooleanToken = mappingPair.Value.AssertBoolean($"{TemplateConstants.Definition} {TemplateConstants.String} {TemplateConstants.RequireNonEmpty}");
RequireNonEmpty = requireNonEmptyBooleanToken.Value;
break;
case TemplateConstants.IsExpression:
var isExpressionBooleanToken = mappingPair.Value.AssertBoolean($"{TemplateConstants.Definition} {TemplateConstants.String} {TemplateConstants.IsExpression}");
IsExpression = isExpressionBooleanToken.Value;
break;
default:
mappingKey.AssertUnexpectedValue($"{TemplateConstants.Definition} {TemplateConstants.String} key");
break;
}
}
break;
case TemplateConstants.CoerceRaw:
continue;
default:
definitionKey.AssertUnexpectedValue($"{TemplateConstants.Definition} key");
break;
}
}
}
internal override DefinitionType DefinitionType => DefinitionType.String;
internal String Constant { get; set; }
internal Boolean IgnoreCase { get; set; }
internal Boolean RequireNonEmpty { get; set; }
internal Boolean IsExpression { get; set; }
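/// <summary>
/// A string literal matches when: <see cref="Constant"/> is set and the value equals it (optionally ignoring case);
/// otherwise <see cref="RequireNonEmpty"/> is set and the value is non-empty; otherwise always.
/// Non-string literals never match.
/// </summary>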
internal override Boolean IsMatch(LiteralToken literal)
{
if (literal is StringToken str)
{
var value = str.Value;
if (!String.IsNullOrEmpty(Constant))
{
var comparison = IgnoreCase ? StringComparison.OrdinalIgnoreCase : StringComparison.Ordinal;
if (String.Equals(Constant, value, comparison))
{
return true;
}
}
else if (RequireNonEmpty)
{
if (!String.IsNullOrEmpty(value))
{
return true;
}
}
else
{
return true;
}
}
return false;
}
internal override void Validate(
TemplateSchema schema,
String name)
{
if (!String.IsNullOrEmpty(Constant) && RequireNonEmpty)
{
throw new ArgumentException($"Properties '{TemplateConstants.Constant}' and '{TemplateConstants.RequireNonEmpty}' cannot both be set");
}
}
}
}

View File

@@ -0,0 +1,484 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Schema
{
/// <summary>
/// This models the root schema object and contains definitions
/// </summary>
public sealed class TemplateSchema
{
internal TemplateSchema()
: this(null)
{
}
private TemplateSchema(MappingToken mapping)
{
// Add built-in type: null
var nullDefinition = new NullDefinition();
Definitions.Add(TemplateConstants.Null, nullDefinition);
// Add built-in type: boolean
var booleanDefinition = new BooleanDefinition();
Definitions.Add(TemplateConstants.Boolean, booleanDefinition);
// Add built-in type: number
var numberDefinition = new NumberDefinition();
Definitions.Add(TemplateConstants.Number, numberDefinition);
// Add built-in type: string
var stringDefinition = new StringDefinition();
Definitions.Add(TemplateConstants.String, stringDefinition);
// Add built-in type: sequence
var sequenceDefinition = new SequenceDefinition { ItemType = TemplateConstants.Any };
Definitions.Add(TemplateConstants.Sequence, sequenceDefinition);
// Add built-in type: mapping
var mappingDefinition = new MappingDefinition { LooseKeyType = TemplateConstants.String, LooseValueType = TemplateConstants.Any };
Definitions.Add(TemplateConstants.Mapping, mappingDefinition);
// Add built-in type: any
var anyDefinition = new OneOfDefinition();
anyDefinition.OneOf.Add(TemplateConstants.Null);
anyDefinition.OneOf.Add(TemplateConstants.Boolean);
anyDefinition.OneOf.Add(TemplateConstants.Number);
anyDefinition.OneOf.Add(TemplateConstants.String);
anyDefinition.OneOf.Add(TemplateConstants.Sequence);
anyDefinition.OneOf.Add(TemplateConstants.Mapping);
Definitions.Add(TemplateConstants.Any, anyDefinition);
if (mapping != null)
{
foreach (var pair in mapping)
{
var key = pair.Key.AssertString($"{TemplateConstants.TemplateSchema} key");
switch (key.Value)
{
case TemplateConstants.Version:
var version = pair.Value.AssertString(TemplateConstants.Version);
Version = version.Value;
break;
case TemplateConstants.Definitions:
var definitions = pair.Value.AssertMapping(TemplateConstants.Definitions);
foreach (var definitionsPair in definitions)
{
var definitionsKey = definitionsPair.Key.AssertString($"{TemplateConstants.Definitions} key");
var definitionsValue = definitionsPair.Value.AssertMapping(TemplateConstants.Definition);
var definition = default(Definition);
foreach (var definitionPair in definitionsValue)
{
var definitionKey = definitionPair.Key.AssertString($"{TemplateConstants.Definition} key");
switch (definitionKey.Value)
{
case TemplateConstants.Null:
definition = new NullDefinition(definitionsValue);
break;
case TemplateConstants.Boolean:
definition = new BooleanDefinition(definitionsValue);
break;
case TemplateConstants.Number:
definition = new NumberDefinition(definitionsValue);
break;
case TemplateConstants.String:
definition = new StringDefinition(definitionsValue);
break;
case TemplateConstants.Sequence:
definition = new SequenceDefinition(definitionsValue);
break;
case TemplateConstants.Mapping:
definition = new MappingDefinition(definitionsValue);
break;
case TemplateConstants.OneOf:
definition = new OneOfDefinition(definitionsValue);
break;
case TemplateConstants.AllowedValues:
// Ignore allowed-values in the C# parser; we don't need to support events here
definition = new NullDefinition(definitionsValue);
break;
case TemplateConstants.CoerceRaw:
case TemplateConstants.Context:
case TemplateConstants.Description:
continue;
default:
definitionKey.AssertUnexpectedValue("definition mapping key"); // throws
break;
}
break;
}
if (definition == null)
{
throw new ArgumentException($"Unable to determine definition details. Specify the '{TemplateConstants.Structure}' property");
}
Definitions.Add(definitionsKey.Value, definition);
}
break;
default:
key.AssertUnexpectedValue($"{TemplateConstants.TemplateSchema} key"); // throws
break;
}
}
}
}
internal Dictionary<String, Definition> Definitions { get; } = new Dictionary<String, Definition>(StringComparer.Ordinal);
internal String Version { get; }
/// <summary>
/// Loads a user's schema file
/// </summary>
internal static TemplateSchema Load(IObjectReader objectReader)
{
var context = new TemplateContext
{
CancellationToken = CancellationToken.None,
Errors = new TemplateValidationErrors(maxErrors: 10, maxMessageLength: 500),
Memory = new TemplateMemory(
maxDepth: 50,
maxEvents: 1000000, // 1 million
maxBytes: 1024 * 1024), // 1 mb
TraceWriter = new EmptyTraceWriter(),
};
var value = TemplateReader.Read(context, TemplateConstants.TemplateSchema, objectReader, null, Schema, out _);
if (context.Errors.Count > 0)
{
throw new TemplateValidationException(context.Errors);
}
var mapping = value.AssertMapping(TemplateConstants.TemplateSchema);
var schema = new TemplateSchema(mapping);
schema.Validate();
return schema;
}
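/// <summary>
/// Returns the definition when it is of type T. For a one-of definition, returns each referenced definition of type T.
/// </summary>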
internal IEnumerable<T> Get<T>(Definition definition)
where T : Definition
{
if (definition is T match)
{
yield return match;
}
else if (definition is OneOfDefinition oneOf)
{
foreach (var reference in oneOf.OneOf)
{
var nestedDefinition = GetDefinition(reference);
if (nestedDefinition is T match2)
{
yield return match2;
}
}
}
}
internal Definition GetDefinition(String type)
{
if (Definitions.TryGetValue(type, out Definition value))
{
return value;
}
throw new ArgumentException($"Schema definition '{type}' not found");
}
internal Boolean HasProperties(MappingDefinition definition)
{
return definition.Properties.Count > 0;
}
internal Boolean TryGetProperty(
MappingDefinition definition,
String name,
out String type)
{
if (definition.Properties.TryGetValue(name, out PropertyDefinition property))
{
type = property.Type;
return true;
}
type = null;
return false;
}
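/// <summary>
/// Looks up a well-known property key across the candidate mapping definitions. When the key is found, returns the
/// value type from the first matching definition and removes any candidate definitions that do not define the key.
/// </summary>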
internal Boolean TryMatchKey(
List<MappingDefinition> definitions,
String key,
out String valueType)
{
valueType = null;
// Check for a matching well known property
var notFoundInSome = false;
for (var i = 0; i < definitions.Count; i++)
{
var definition = definitions[i];
if (TryGetProperty(definition, key, out String t))
{
if (valueType == null)
{
valueType = t;
}
}
else
{
notFoundInSome = true;
}
}
// Check if found
if (valueType != null)
{
// Filter the matched definitions if needed
if (notFoundInSome)
{
for (var i = 0; i < definitions.Count;)
{
if (TryGetProperty(definitions[i], key, out _))
{
i++;
}
else
{
definitions.RemoveAt(i);
}
}
}
return true;
}
return false;
}
/// <summary>
/// The built-in schema for reading schema files
/// </summary>
private static TemplateSchema Schema
{
get
{
if (s_schema == null)
{
var schema = new TemplateSchema();
StringDefinition stringDefinition;
SequenceDefinition sequenceDefinition;
MappingDefinition mappingDefinition;
OneOfDefinition oneOfDefinition;
// template-schema
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Version, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Definitions, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.Definitions)));
schema.Definitions.Add(TemplateConstants.TemplateSchema, mappingDefinition);
// definitions
mappingDefinition = new MappingDefinition();
mappingDefinition.LooseKeyType = TemplateConstants.NonEmptyString;
mappingDefinition.LooseValueType = TemplateConstants.Definition;
schema.Definitions.Add(TemplateConstants.Definitions, mappingDefinition);
// definition
oneOfDefinition = new OneOfDefinition();
oneOfDefinition.OneOf.Add(TemplateConstants.NullDefinition);
oneOfDefinition.OneOf.Add(TemplateConstants.BooleanDefinition);
oneOfDefinition.OneOf.Add(TemplateConstants.NumberDefinition);
oneOfDefinition.OneOf.Add(TemplateConstants.StringDefinition);
oneOfDefinition.OneOf.Add(TemplateConstants.SequenceDefinition);
oneOfDefinition.OneOf.Add(TemplateConstants.MappingDefinition);
oneOfDefinition.OneOf.Add(TemplateConstants.OneOfDefinition);
schema.Definitions.Add(TemplateConstants.Definition, oneOfDefinition);
// null-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.CoerceRaw, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.Boolean)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Null, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.NullDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.NullDefinition, mappingDefinition);
// null-definition-properties
mappingDefinition = new MappingDefinition();
schema.Definitions.Add(TemplateConstants.NullDefinitionProperties, mappingDefinition);
// boolean-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.CoerceRaw, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.Boolean)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Boolean, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.BooleanDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.BooleanDefinition, mappingDefinition);
// boolean-definition-properties
mappingDefinition = new MappingDefinition();
schema.Definitions.Add(TemplateConstants.BooleanDefinitionProperties, mappingDefinition);
// number-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.CoerceRaw, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.Boolean)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Number, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.NumberDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.NumberDefinition, mappingDefinition);
// number-definition-properties
mappingDefinition = new MappingDefinition();
schema.Definitions.Add(TemplateConstants.NumberDefinitionProperties, mappingDefinition);
// string-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.CoerceRaw, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.Boolean)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.String, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.StringDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.StringDefinition, mappingDefinition);
// string-definition-properties
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Constant, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.IgnoreCase, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.Boolean)));
mappingDefinition.Properties.Add(TemplateConstants.RequireNonEmpty, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.Boolean)));
mappingDefinition.Properties.Add(TemplateConstants.IsExpression, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.Boolean)));
schema.Definitions.Add(TemplateConstants.StringDefinitionProperties, mappingDefinition);
// sequence-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.CoerceRaw, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.Boolean)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Sequence, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.SequenceDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.SequenceDefinition, mappingDefinition);
// sequence-definition-properties
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.ItemType, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
schema.Definitions.Add(TemplateConstants.SequenceDefinitionProperties, mappingDefinition);
// mapping-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.CoerceRaw, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.Boolean)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Mapping, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.MappingDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.MappingDefinition, mappingDefinition);
// mapping-definition-properties
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Properties, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.Properties)));
mappingDefinition.Properties.Add(TemplateConstants.LooseKeyType, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.LooseValueType, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
schema.Definitions.Add(TemplateConstants.MappingDefinitionProperties, mappingDefinition);
// properties
mappingDefinition = new MappingDefinition();
mappingDefinition.LooseKeyType = TemplateConstants.NonEmptyString;
mappingDefinition.LooseValueType = TemplateConstants.PropertyValue;
schema.Definitions.Add(TemplateConstants.Properties, mappingDefinition);
// property-value
oneOfDefinition = new OneOfDefinition();
oneOfDefinition.OneOf.Add(TemplateConstants.NonEmptyString);
oneOfDefinition.OneOf.Add(TemplateConstants.MappingPropertyValue);
schema.Definitions.Add(TemplateConstants.PropertyValue, oneOfDefinition);
// mapping-property-value
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Type, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Required, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.Boolean)));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.String)));
schema.Definitions.Add(TemplateConstants.MappingPropertyValue, mappingDefinition);
// one-of-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.CoerceRaw, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.Boolean)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.OneOf, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.AllowedValues, new PropertyDefinition(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
schema.Definitions.Add(TemplateConstants.OneOfDefinition, mappingDefinition);
// non-empty-string
stringDefinition = new StringDefinition();
stringDefinition.RequireNonEmpty = true;
schema.Definitions.Add(TemplateConstants.NonEmptyString, stringDefinition);
// sequence-of-non-empty-string
sequenceDefinition = new SequenceDefinition();
sequenceDefinition.ItemType = TemplateConstants.NonEmptyString;
schema.Definitions.Add(TemplateConstants.SequenceOfNonEmptyString, sequenceDefinition);
schema.Validate();
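// Publish the built-in schema atomically; if another thread already published an instance, that one wins and this one is discarded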
Interlocked.CompareExchange(ref s_schema, schema, null);
}
return s_schema;
}
}
private void Validate()
{
var oneOfPairs = new List<KeyValuePair<String, OneOfDefinition>>();
foreach (var pair in Definitions)
{
var name = pair.Key;
if (!s_definitionNameRegex.IsMatch(name ?? String.Empty))
{
throw new ArgumentException($"Invalid definition name '{name}'");
}
var definition = pair.Value;
// Delay validation for 'one-of' definitions
if (definition is OneOfDefinition oneOf)
{
oneOfPairs.Add(new KeyValuePair<String, OneOfDefinition>(name, oneOf));
}
// Otherwise validate now
else
{
definition.Validate(this, name);
}
}
// Validate 'one-of' definitions
foreach (var pair in oneOfPairs)
{
var name = pair.Key;
var oneOf = pair.Value;
oneOf.Validate(this, name);
}
}
private static readonly Regex s_definitionNameRegex = new Regex("^[a-zA-Z_][a-zA-Z0-9_-]*$", RegexOptions.Compiled);
private static TemplateSchema s_schema;
}
}

View File

@@ -0,0 +1,20 @@
using System;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
/// <summary>
/// Tracks telemetry data during workflow parsing.
/// </summary>
public sealed class Telemetry
{
/// <summary>
/// Gets or sets the count of YAML anchors encountered during parsing.
/// </summary>
public Int32 YamlAnchors { get; set; }
/// <summary>
/// Gets or sets the count of YAML aliases encountered during parsing.
/// </summary>
public Int32 YamlAliases { get; set; }
}
}

View File

@@ -0,0 +1,58 @@
using System;
using System.Linq;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
internal static class TemplateConstants
{
internal const String AllowedValues = "allowed-values";
internal const String Any = "any";
internal const String Boolean = "boolean";
internal const String BooleanDefinition = "boolean-definition";
internal const String BooleanDefinitionProperties = "boolean-definition-properties";
internal const String CloseExpression = "}}";
internal const String CoerceRaw = "coerce-raw";
internal const String Constant = "constant";
internal const String Context = "context";
internal const String Definition = "definition";
internal const String Definitions = "definitions";
internal const String Description = "description";
internal const String IgnoreCase = "ignore-case";
internal const String InsertDirective = "insert";
internal const String IsExpression = "is-expression";
internal const String ItemType = "item-type";
internal const String LooseKeyType = "loose-key-type";
internal const String LooseValueType = "loose-value-type";
internal const String MaxConstant = "MAX";
internal const String Mapping = "mapping";
internal const String MappingDefinition = "mapping-definition";
internal const String MappingDefinitionProperties = "mapping-definition-properties";
internal const String MappingPropertyValue = "mapping-property-value";
internal const String NonEmptyString = "non-empty-string";
internal const String Null = "null";
internal const String NullDefinition = "null-definition";
internal const String NullDefinitionProperties = "null-definition-properties";
internal const String Number = "number";
internal const String NumberDefinition = "number-definition";
internal const String NumberDefinitionProperties = "number-definition-properties";
internal const String OneOf = "one-of";
internal const String OneOfDefinition = "one-of-definition";
internal const String OpenExpression = "${{";
internal const String PropertyValue = "property-value";
internal const String Properties = "properties";
internal const String Required = "required";
internal const String RequireNonEmpty = "require-non-empty";
internal const String Scalar = "scalar";
internal const String Sequence = "sequence";
internal const String SequenceDefinition = "sequence-definition";
internal const String SequenceDefinitionProperties = "sequence-definition-properties";
internal const String Type = "type";
internal const String SequenceOfNonEmptyString = "sequence-of-non-empty-string";
internal const String String = "string";
internal const String StringDefinition = "string-definition";
internal const String StringDefinitionProperties = "string-definition-properties";
internal const String Structure = "structure";
internal const String TemplateSchema = "template-schema";
internal const String Version = "version";
}
}

View File

@@ -0,0 +1,261 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using GitHub.Actions.Expressions;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Schema;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
/// <summary>
/// Context object that is flowed through while loading and evaluating object templates
/// </summary>
public sealed class TemplateContext
{
internal CancellationToken CancellationToken { get; set; }
internal TemplateValidationErrors Errors
{
get
{
if (m_errors == null)
{
m_errors = new TemplateValidationErrors();
}
return m_errors;
}
set
{
m_errors = value;
}
}
/// <summary>
/// Available functions within expression contexts
/// </summary>
internal IList<IFunctionInfo> ExpressionFunctions
{
get
{
if (m_expressionFunctions == null)
{
m_expressionFunctions = new List<IFunctionInfo>();
}
return m_expressionFunctions;
}
}
/// <summary>
/// Available values within expression contexts
/// </summary>
internal IDictionary<String, Object> ExpressionValues
{
get
{
if (m_expressionValues == null)
{
m_expressionValues = new Dictionary<String, Object>(StringComparer.OrdinalIgnoreCase);
}
return m_expressionValues;
}
}
internal TemplateMemory Memory { get; set; }
internal TemplateSchema Schema { get; set; }
/// <summary>
/// Gets or sets the telemetry data collected during parsing.
/// </summary>
public Telemetry Telemetry
{
get
{
if (m_telemetry == null)
{
m_telemetry = new Telemetry();
}
return m_telemetry;
}
set
{
m_telemetry = value;
}
}
/// <summary>
/// State data for the current evaluation
/// </summary>
public IDictionary<String, Object> State
{
get
{
if (m_state == null)
{
m_state = new Dictionary<String, Object>(StringComparer.OrdinalIgnoreCase);
}
return m_state;
}
}
/// <summary>
/// Gets or sets a value indicating whether the expression function fromJson performs strict JSON parsing.
/// Used during evaluation only.
/// </summary>
internal Boolean StrictJsonParsing { get; set; }
internal ITraceWriter TraceWriter { get; set; }
private IDictionary<String, Int32> FileIds
{
get
{
if (m_fileIds == null)
{
m_fileIds = new Dictionary<String, Int32>(StringComparer.OrdinalIgnoreCase);
}
return m_fileIds;
}
set
{
m_fileIds = value;
}
}
private List<String> FileNames
{
get
{
if (m_fileNames == null)
{
m_fileNames = new List<String>();
}
return m_fileNames;
}
set
{
m_fileNames = value;
}
}
internal void Error(
TemplateToken value,
Exception ex)
{
Error(value?.FileId, value?.Line, value?.Column, ex);
}
internal void Error(
Int32? fileId,
Int32? line,
Int32? column,
Exception ex)
{
var prefix = GetErrorPrefix(fileId, line, column);
Errors.Add(prefix, ex);
TraceWriter.Error(prefix, ex);
}
internal void Error(
TemplateToken value,
String message)
{
Error(value?.FileId, value?.Line, value?.Column, message);
}
internal void Error(
Int32? fileId,
Int32? line,
Int32? column,
String message)
{
var prefix = GetErrorPrefix(fileId, line, column);
if (!String.IsNullOrEmpty(prefix))
{
message = $"{prefix} {message}";
}
Errors.Add(message);
TraceWriter.Error(message);
}
internal INamedValueInfo[] GetExpressionNamedValues()
{
if (m_expressionValues?.Count > 0)
{
return m_expressionValues.Keys.Select(x => new NamedValueInfo<ContextValueNode>(x)).ToArray();
}
return null;
}
internal Int32 GetFileId(String file)
{
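// File ids are 1-based; the name for an id is stored at index (id - 1) in FileNames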
if (!FileIds.TryGetValue(file, out Int32 id))
{
id = FileIds.Count + 1;
FileIds.Add(file, id);
FileNames.Add(file);
Memory.AddBytes(file);
}
return id;
}
internal String GetFileName(Int32 fileId)
{
return FileNames.Count >= fileId ? FileNames[fileId - 1] : null;
}
internal IReadOnlyList<String> GetFileTable()
{
return FileNames.AsReadOnly();
}
private String GetErrorPrefix(
Int32? fileId,
Int32? line,
Int32? column)
{
var fileName = fileId.HasValue ? GetFileName(fileId.Value) : null;
if (!String.IsNullOrEmpty(fileName))
{
if (line != null && column != null)
{
return $"{fileName} {TemplateStrings.LineColumn(line, column)}:";
}
else
{
return $"{fileName}:";
}
}
else if (line != null && column != null)
{
return $"{TemplateStrings.LineColumn(line, column)}:";
}
else
{
return String.Empty;
}
}
private TemplateValidationErrors m_errors;
private IList<IFunctionInfo> m_expressionFunctions;
private IDictionary<String, Object> m_expressionValues;
private IDictionary<String, Int32> m_fileIds;
private List<String> m_fileNames;
private IDictionary<String, Object> m_state;
private Telemetry m_telemetry;
}
}

View File

@@ -0,0 +1,428 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Linq;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Schema;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
/// <summary>
/// Expands expression tokens whose allowed context is currently available. The allowed context is defined
/// by the schema. The available context is based on the ExpressionValues and ExpressionFunctions registered on the TemplateContext.
/// </summary>
internal partial class TemplateEvaluator
{
private TemplateEvaluator(
TemplateContext context,
TemplateToken template,
Int32 removeBytes)
{
m_context = context;
m_schema = context.Schema;
m_unraveler = new TemplateUnraveler(context, template, removeBytes);
}
internal static TemplateToken Evaluate(
TemplateContext context,
String type,
TemplateToken template,
Int32 removeBytes,
Int32? fileId)
{
TemplateToken result;
var evaluator = new TemplateEvaluator(context, template, removeBytes);
try
{
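// Gather the expression contexts that are currently available (named values plus registered functions).
// A definition's expressions are expanded only when every context it allows is available (see DefinitionInfo).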
var availableContext = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
foreach (var key in context.ExpressionValues.Keys)
{
availableContext.Add(key);
}
foreach (var function in context.ExpressionFunctions)
{
availableContext.Add($"{function.Name}()");
}
var definitionInfo = new DefinitionInfo(context.Schema, type, availableContext);
result = evaluator.Evaluate(definitionInfo);
if (result != null)
{
evaluator.m_unraveler.ReadEnd();
}
}
catch (Exception ex)
{
context.Error(fileId, null, null, ex);
result = null;
}
return result;
}
private TemplateToken Evaluate(DefinitionInfo definition)
{
// Scalar
if (m_unraveler.AllowScalar(definition.Expand, out ScalarToken scalar))
{
if (scalar is LiteralToken literal)
{
Validate(ref literal, definition);
return literal;
}
else
{
return scalar;
}
}
// Sequence start
if (m_unraveler.AllowSequenceStart(definition.Expand, out SequenceToken sequence))
{
var sequenceDefinition = definition.Get<SequenceDefinition>().FirstOrDefault();
// Legal
if (sequenceDefinition != null)
{
var itemDefinition = new DefinitionInfo(definition, sequenceDefinition.ItemType);
// Add each item
while (!m_unraveler.AllowSequenceEnd(definition.Expand))
{
var item = Evaluate(itemDefinition);
sequence.Add(item);
}
}
// Illegal
else
{
// Error
m_context.Error(sequence, TemplateStrings.UnexpectedSequenceStart());
// Skip each item
while (!m_unraveler.AllowSequenceEnd(expand: false))
{
m_unraveler.SkipSequenceItem();
}
}
return sequence;
}
// Mapping
if (m_unraveler.AllowMappingStart(definition.Expand, out MappingToken mapping))
{
var mappingDefinitions = definition.Get<MappingDefinition>().ToList();
// Legal
if (mappingDefinitions.Count > 0)
{
if (mappingDefinitions.Count > 1 ||
m_schema.HasProperties(mappingDefinitions[0]) ||
String.IsNullOrEmpty(mappingDefinitions[0].LooseKeyType))
{
HandleMappingWithWellKnownProperties(definition, mappingDefinitions, mapping);
}
else
{
var keyDefinition = new DefinitionInfo(definition, mappingDefinitions[0].LooseKeyType);
var valueDefinition = new DefinitionInfo(definition, mappingDefinitions[0].LooseValueType);
HandleMappingWithAllLooseProperties(definition, keyDefinition, valueDefinition, mapping);
}
}
// Illegal
else
{
m_context.Error(mapping, TemplateStrings.UnexpectedMappingStart());
while (!m_unraveler.AllowMappingEnd(expand: false))
{
m_unraveler.SkipMappingKey();
m_unraveler.SkipMappingValue();
}
}
return mapping;
}
throw new ArgumentException(TemplateStrings.ExpectedScalarSequenceOrMapping());
}
private void HandleMappingWithWellKnownProperties(
DefinitionInfo definition,
List<MappingDefinition> mappingDefinitions,
MappingToken mapping)
{
// Check if loose properties are allowed
String looseKeyType = null;
String looseValueType = null;
DefinitionInfo? looseKeyDefinition = null;
DefinitionInfo? looseValueDefinition = null;
if (!String.IsNullOrEmpty(mappingDefinitions[0].LooseKeyType))
{
looseKeyType = mappingDefinitions[0].LooseKeyType;
looseValueType = mappingDefinitions[0].LooseValueType;
}
var keys = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
var hasExpressionKey = false;
while (m_unraveler.AllowScalar(definition.Expand, out ScalarToken nextKeyScalar))
{
// Expression
if (nextKeyScalar is ExpressionToken)
{
hasExpressionKey = true;
var anyDefinition = new DefinitionInfo(definition, TemplateConstants.Any);
mapping.Add(nextKeyScalar, Evaluate(anyDefinition));
continue;
}
// Not a string, convert
if (!(nextKeyScalar is StringToken nextKey))
{
nextKey = new StringToken(nextKeyScalar.FileId, nextKeyScalar.Line, nextKeyScalar.Column, nextKeyScalar.ToString());
}
// Duplicate
if (!keys.Add(nextKey.Value))
{
m_context.Error(nextKey, TemplateStrings.ValueAlreadyDefined(nextKey.Value));
m_unraveler.SkipMappingValue();
continue;
}
// Well known
if (m_schema.TryMatchKey(mappingDefinitions, nextKey.Value, out String nextValueType))
{
var nextValueDefinition = new DefinitionInfo(definition, nextValueType);
var nextValue = Evaluate(nextValueDefinition);
mapping.Add(nextKey, nextValue);
continue;
}
// Loose
if (looseKeyType != null)
{
if (looseKeyDefinition == null)
{
looseKeyDefinition = new DefinitionInfo(definition, looseKeyType);
looseValueDefinition = new DefinitionInfo(definition, looseValueType);
}
Validate(nextKey, looseKeyDefinition.Value);
var nextValue = Evaluate(looseValueDefinition.Value);
mapping.Add(nextKey, nextValue);
continue;
}
// Error
m_context.Error(nextKey, TemplateStrings.UnexpectedValue(nextKey.Value));
m_unraveler.SkipMappingValue();
}
// Expect exactly one candidate mapping definition to remain; otherwise the one-of could not be resolved
if (mappingDefinitions.Count > 1)
{
var hitCount = new Dictionary<String, Int32>();
foreach (MappingDefinition mapdef in mappingDefinitions)
{
foreach (String key in mapdef.Properties.Keys)
{
if (!hitCount.TryGetValue(key, out Int32 value))
{
hitCount.Add(key, 1);
}
else
{
hitCount[key] = value + 1;
}
}
}
List<String> nonDuplicates = new List<String>();
foreach (String key in hitCount.Keys)
{
if (hitCount[key] == 1)
{
nonDuplicates.Add(key);
}
}
nonDuplicates.Sort();
String listToDeDuplicate = String.Join(", ", nonDuplicates);
m_context.Error(mapping, TemplateStrings.UnableToDetermineOneOf(listToDeDuplicate));
}
else if (mappingDefinitions.Count == 1 && !hasExpressionKey)
{
foreach (var property in mappingDefinitions[0].Properties)
{
if (property.Value.Required)
{
if (!keys.Contains(property.Key))
{
m_context.Error(mapping, $"Required property is missing: {property.Key}");
}
}
}
}
m_unraveler.ReadMappingEnd();
}
private void HandleMappingWithAllLooseProperties(
DefinitionInfo mappingDefinition,
DefinitionInfo keyDefinition,
DefinitionInfo valueDefinition,
MappingToken mapping)
{
var keys = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
while (m_unraveler.AllowScalar(mappingDefinition.Expand, out ScalarToken nextKeyScalar))
{
// Expression
if (nextKeyScalar is ExpressionToken)
{
if (nextKeyScalar is BasicExpressionToken)
{
mapping.Add(nextKeyScalar, Evaluate(valueDefinition));
}
else
{
var anyDefinition = new DefinitionInfo(mappingDefinition, TemplateConstants.Any);
mapping.Add(nextKeyScalar, Evaluate(anyDefinition));
}
continue;
}
// Not a string
if (!(nextKeyScalar is StringToken nextKey))
{
nextKey = new StringToken(nextKeyScalar.FileId, nextKeyScalar.Line, nextKeyScalar.Column, nextKeyScalar.ToString());
}
// Duplicate
if (!keys.Add(nextKey.Value))
{
m_context.Error(nextKey, TemplateStrings.ValueAlreadyDefined(nextKey.Value));
m_unraveler.SkipMappingValue();
continue;
}
// Validate
Validate(nextKey, keyDefinition);
// Add the pair
var nextValue = Evaluate(valueDefinition);
mapping.Add(nextKey, nextValue);
}
m_unraveler.ReadMappingEnd();
}
private void Validate(
StringToken stringToken,
DefinitionInfo definition)
{
var literal = stringToken as LiteralToken;
Validate(ref literal, definition);
}
private void Validate(
ref LiteralToken literal,
DefinitionInfo definition)
{
// Legal
var literal2 = literal;
if (definition.Get<ScalarDefinition>().Any(x => x.IsMatch(literal2)))
{
return;
}
// Not a string, convert
if (literal.Type != TokenType.String)
{
var stringToken = new StringToken(literal.FileId, literal.Line, literal.Column, literal.ToString());
// Legal
if (definition.Get<StringDefinition>().Any(x => x.IsMatch(stringToken)))
{
literal = stringToken;
return;
}
}
// Illegal
m_context.Error(literal, TemplateStrings.UnexpectedValue(literal));
}
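/// <summary>
/// Resolves a schema definition by name and tracks the accumulated allowed context, along with whether
/// expressions may be expanded (i.e. every allowed context is currently available).
/// </summary>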
private struct DefinitionInfo
{
public DefinitionInfo(
TemplateSchema schema,
String name,
HashSet<String> availableContext)
{
m_schema = schema;
m_availableContext = availableContext;
// Lookup the definition
Definition = m_schema.GetDefinition(name);
// Determine whether to expand
m_allowedContext = Definition.EvaluatorContext;
if (Definition.EvaluatorContext.Length > 0)
{
Expand = m_availableContext.IsSupersetOf(m_allowedContext);
}
else
{
Expand = false;
}
}
public DefinitionInfo(
DefinitionInfo parent,
String name)
{
m_schema = parent.m_schema;
m_availableContext = parent.m_availableContext;
// Lookup the definition
Definition = m_schema.GetDefinition(name);
// Determine whether to expand
if (Definition.EvaluatorContext.Length > 0)
{
m_allowedContext = new HashSet<String>(parent.m_allowedContext.Concat(Definition.EvaluatorContext), StringComparer.OrdinalIgnoreCase).ToArray();
Expand = m_availableContext.IsSupersetOf(m_allowedContext);
}
else
{
m_allowedContext = parent.m_allowedContext;
Expand = parent.Expand;
}
}
public IEnumerable<T> Get<T>()
where T : Definition
{
return m_schema.Get<T>(Definition);
}
private HashSet<String> m_availableContext;
private String[] m_allowedContext;
private TemplateSchema m_schema;
public Definition Definition;
public Boolean Expand;
}
private readonly TemplateContext m_context;
private readonly TemplateSchema m_schema;
private readonly TemplateUnraveler m_unraveler;
}
}

View File

@@ -0,0 +1,385 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Linq;
using GitHub.Actions.Expressions.Data;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
using Newtonsoft.Json.Linq;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
/// <summary>
/// Tracks resource usage during template processing (depth, events, bytes)
/// </summary>
public sealed class TemplateMemory
{
/// <summary>
/// Creates a new instance
/// </summary>
/// <param name="maxBytes">The maximum allowed bytes</param>
public TemplateMemory(Int32 maxBytes)
: this(0, 0, maxBytes: maxBytes, null)
{
}
/// <summary>
/// Creates a new instance
/// </summary>
/// <param name="maxDepth">The maximum allowed depth</param>
/// <param name="maxEvents">The maximum allowed events</param>
/// <param name="maxBytes">The maximum allowed bytes</param>
internal TemplateMemory(
Int32 maxDepth,
Int32 maxEvents,
Int32 maxBytes)
: this(maxDepth, maxEvents, maxBytes, null)
{
}
/// <summary>
/// Creates a new instance
/// </summary>
/// <param name="maxDepth">The maximum allowed depth</param>
/// <param name="maxEvents">The maximum allowed events</param>
/// <param name="maxBytes">The maximum allowed bytes</param>
/// <param name="parent">Optional parent instance, for byte tracking only. Any bytes added/subtracted to the current instance, will be also added/subtracted to the parent instance.</param>
internal TemplateMemory(
Int32 maxDepth,
Int32 maxEvents,
Int32 maxBytes,
TemplateMemory parent)
{
m_maxDepth = maxDepth;
m_maxEvents = maxEvents;
m_maxBytes = maxBytes;
m_parent = parent;
}
public Int32 CurrentBytes => m_currentBytes;
public Int32 MaxBytes => m_maxBytes;
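/// <summary>
/// Adds to the running byte count (and to the parent instance, when attached) and throws once the configured maximum is exceeded.
/// </summary>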
public void AddBytes(Int32 bytes)
{
checked
{
m_currentBytes += bytes;
}
if (m_currentBytes > m_maxBytes)
{
throw new InvalidOperationException(TemplateStrings.MaxObjectSizeExceeded());
}
m_parent?.AddBytes(bytes);
}
public void AddBytes(String value)
{
var bytes = CalculateBytes(value);
AddBytes(bytes);
}
internal void AddBytes(
ExpressionData value,
Boolean traverse)
{
var bytes = CalculateBytes(value, traverse);
AddBytes(bytes);
}
internal void AddBytes(
JToken value,
Boolean traverse)
{
var bytes = CalculateBytes(value, traverse);
AddBytes(bytes);
}
internal void AddBytes(
TemplateToken value,
Boolean traverse = false)
{
var bytes = CalculateBytes(value, traverse);
AddBytes(bytes);
}
internal void AddBytes(LiteralToken literal)
{
var bytes = CalculateBytes(literal);
AddBytes(bytes);
}
internal void AddBytes(SequenceToken sequence)
{
var bytes = CalculateBytes(sequence);
AddBytes(bytes);
}
internal void AddBytes(MappingToken mapping)
{
var bytes = CalculateBytes(mapping);
AddBytes(bytes);
}
internal void AddBytes(BasicExpressionToken basicExpression)
{
var bytes = CalculateBytes(basicExpression);
AddBytes(bytes);
}
internal void AddBytes(InsertExpressionToken insertExpression)
{
var bytes = CalculateBytes(insertExpression);
AddBytes(bytes);
}
internal Int32 CalculateBytes(String value)
{
// This measurement doesn't have to be perfect
// https://codeblog.jonskeet.uk/2011/04/05/of-memory-and-strings/
checked
{
return StringBaseOverhead + ((value?.Length ?? 0) * sizeof(Char));
}
}
internal static Int32 CalculateBytes(
ExpressionData value,
Boolean traverse)
{
var enumerable = traverse ? value.Traverse() : new[] { value } as IEnumerable<ExpressionData>;
var result = 0;
foreach (var item in enumerable)
{
// This measurement doesn't have to be perfect
// https://codeblog.jonskeet.uk/2011/04/05/of-memory-and-strings/
if (item is StringExpressionData str)
{
checked
{
result += TemplateMemory.MinObjectSize + TemplateMemory.StringBaseOverhead + (str.Value.Length * sizeof(Char));
}
}
else if (item is ArrayExpressionData || item is DictionaryExpressionData || item is BooleanExpressionData || item is NumberExpressionData)
{
// Min object size is good enough. Allows for base + a few fields.
checked
{
result += TemplateMemory.MinObjectSize;
}
}
else if (item is null)
{
checked
{
result += IntPtr.Size;
}
}
else
{
throw new NotSupportedException($"Unexpected workflow context data type '{item.GetType().Name}'");
}
}
return result;
}
internal Int32 CalculateBytes(
JToken value,
Boolean traverse)
{
// This measurement doesn't have to be perfect
// https://codeblog.jonskeet.uk/2011/04/05/of-memory-and-strings/
if (value is null)
{
return MinObjectSize;
}
if (!traverse)
{
switch (value.Type)
{
case JTokenType.String:
checked
{
return StringBaseOverhead + (value.ToObject<String>().Length * sizeof(Char));
}
case JTokenType.Property:
var property = value as JProperty;
checked
{
return StringBaseOverhead + ((property.Name?.Length ?? 0) * sizeof(Char));
}
case JTokenType.Array:
case JTokenType.Boolean:
case JTokenType.Float:
case JTokenType.Integer:
case JTokenType.Null:
case JTokenType.Object:
return MinObjectSize;
default:
throw new NotSupportedException($"Unexpected JToken type '{value.Type}' when traversing object");
}
}
var result = 0;
do
{
// Descend as much as possible
while (true)
{
// Add bytes
var bytes = CalculateBytes(value, false);
checked
{
result += bytes;
}
// Descend
if (value.HasValues)
{
value = value.First;
}
// No more descendants
else
{
break;
}
}
// Next sibling or ancestor sibling
do
{
var sibling = value.Next;
// Sibling found
if (sibling != null)
{
value = sibling;
break;
}
// Ascend
value = value.Parent;
} while (value != null);
} while (value != null);
return result;
}
internal Int32 CalculateBytes(
TemplateToken value,
Boolean traverse = false)
{
var enumerable = traverse ? value.Traverse() : new[] { value };
var result = 0;
foreach (var item in enumerable)
{
// This measurement doesn't have to be perfect
// https://codeblog.jonskeet.uk/2011/04/05/of-memory-and-strings/
switch (item.Type)
{
case TokenType.Null:
case TokenType.Boolean:
case TokenType.Number:
checked
{
result += MinObjectSize;
}
break;
case TokenType.String:
var stringToken = item as StringToken;
checked
{
result += MinObjectSize + StringBaseOverhead + ((stringToken.Value?.Length ?? 0) * sizeof(Char));
}
break;
case TokenType.Sequence:
case TokenType.Mapping:
case TokenType.InsertExpression:
// Min object size is good enough. Allows for base + a few fields.
checked
{
result += MinObjectSize;
}
break;
case TokenType.BasicExpression:
var basicExpression = item as BasicExpressionToken;
checked
{
result += MinObjectSize + StringBaseOverhead + ((basicExpression.Expression?.Length ?? 0) * sizeof(Char));
}
break;
default:
throw new NotSupportedException($"Unexpected template type '{item.Type}'");
}
}
return result;
}
internal void SubtractBytes(Int32 bytes)
{
if (bytes > m_currentBytes)
{
throw new InvalidOperationException("Bytes to subtract exceeds total bytes");
}
m_currentBytes -= bytes;
m_parent?.SubtractBytes(bytes);
}
internal void SubtractBytes(
TemplateToken value,
Boolean traverse = false)
{
var bytes = CalculateBytes(value, traverse);
SubtractBytes(bytes);
}
internal void IncrementDepth()
{
if (m_depth++ >= m_maxDepth)
{
throw new InvalidOperationException(TemplateStrings.MaxObjectDepthExceeded());
}
}
internal void DecrementDepth()
{
m_depth--;
}
internal void IncrementEvents()
{
if (m_events++ >= m_maxEvents)
{
throw new InvalidOperationException(TemplateStrings.MaxTemplateEventsExceeded());
}
}
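// Rough sizing constants, not exact measurements: approximate minimum CLR object size and string base overhead on a 64-bit runtime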
internal const Int32 MinObjectSize = 24;
internal const Int32 StringBaseOverhead = 26;
private readonly Int32 m_maxDepth;
private readonly Int32 m_maxEvents;
private readonly Int32 m_maxBytes;
private Int32 m_depth;
private Int32 m_events;
private Int32 m_currentBytes;
private TemplateMemory m_parent;
}
}

View File

@@ -0,0 +1,832 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text;
using GitHub.Actions.Expressions.Sdk;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Schema;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
/// <summary>
/// Converts a source object format into a TemplateToken
/// </summary>
internal sealed class TemplateReader
{
private TemplateReader(
TemplateContext context,
TemplateSchema schema,
IObjectReader objectReader,
Int32? fileId)
{
m_context = context;
m_schema = schema;
m_memory = context.Memory;
m_objectReader = objectReader;
m_fileId = fileId;
}
internal static TemplateToken Read(
TemplateContext context,
String type,
IObjectReader objectReader,
Int32? fileId,
out Int32 bytes)
{
return Read(context, type, objectReader, fileId, context.Schema, out bytes);
}
internal static TemplateToken Read(
TemplateContext context,
String type,
IObjectReader objectReader,
Int32? fileId,
TemplateSchema schema,
out Int32 bytes)
{
TemplateToken result = null;
var reader = new TemplateReader(context, schema, objectReader, fileId);
var originalBytes = context.Memory.CurrentBytes;
try
{
objectReader.ValidateStart();
var definition = new DefinitionInfo(schema, type);
result = reader.ReadValue(definition);
objectReader.ValidateEnd();
}
catch (Exception ex)
{
context.Error(fileId, null, null, ex);
}
finally
{
bytes = context.Memory.CurrentBytes - originalBytes;
}
return result;
}
private TemplateToken ReadValue(DefinitionInfo definition)
{
m_memory.IncrementEvents();
// Scalar
if (m_objectReader.AllowLiteral(out LiteralToken literal))
{
var scalar = ParseScalar(literal, definition.AllowedContext);
Validate(ref scalar, definition);
m_memory.AddBytes(scalar);
return scalar;
}
// Sequence
if (m_objectReader.AllowSequenceStart(out SequenceToken sequence))
{
m_memory.IncrementDepth();
m_memory.AddBytes(sequence);
var sequenceDefinition = definition.Get<SequenceDefinition>().FirstOrDefault();
// Legal
if (sequenceDefinition != null)
{
var itemDefinition = new DefinitionInfo(definition, sequenceDefinition.ItemType);
// Add each item
while (!m_objectReader.AllowSequenceEnd())
{
var item = ReadValue(itemDefinition);
sequence.Add(item);
}
}
// Illegal
else
{
// Error
m_context.Error(sequence, TemplateStrings.UnexpectedSequenceStart());
// Skip each item
while (!m_objectReader.AllowSequenceEnd())
{
SkipValue();
}
}
m_memory.DecrementDepth();
return sequence;
}
// Mapping
if (m_objectReader.AllowMappingStart(out MappingToken mapping))
{
m_memory.IncrementDepth();
m_memory.AddBytes(mapping);
var mappingDefinitions = definition.Get<MappingDefinition>().ToList();
// Legal
if (mappingDefinitions.Count > 0)
{
if (mappingDefinitions.Count > 1 ||
m_schema.HasProperties(mappingDefinitions[0]) ||
String.IsNullOrEmpty(mappingDefinitions[0].LooseKeyType))
{
HandleMappingWithWellKnownProperties(definition, mappingDefinitions, mapping);
}
else
{
var keyDefinition = new DefinitionInfo(definition, mappingDefinitions[0].LooseKeyType);
var valueDefinition = new DefinitionInfo(definition, mappingDefinitions[0].LooseValueType);
HandleMappingWithAllLooseProperties(definition, keyDefinition, valueDefinition, mapping);
}
}
// Illegal
else
{
m_context.Error(mapping, TemplateStrings.UnexpectedMappingStart());
while (!m_objectReader.AllowMappingEnd())
{
SkipValue();
SkipValue();
}
}
m_memory.DecrementDepth();
return mapping;
}
throw new InvalidOperationException(TemplateStrings.ExpectedScalarSequenceOrMapping());
}
private void HandleMappingWithWellKnownProperties(
DefinitionInfo definition,
List<MappingDefinition> mappingDefinitions,
MappingToken mapping)
{
// Check if loose properties are allowed
String looseKeyType = null;
String looseValueType = null;
DefinitionInfo? looseKeyDefinition = null;
DefinitionInfo? looseValueDefinition = null;
if (!String.IsNullOrEmpty(mappingDefinitions[0].LooseKeyType))
{
looseKeyType = mappingDefinitions[0].LooseKeyType;
looseValueType = mappingDefinitions[0].LooseValueType;
}
var keys = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
var hasExpressionKey = false;
while (m_objectReader.AllowLiteral(out LiteralToken rawLiteral))
{
var nextKeyScalar = ParseScalar(rawLiteral, definition.AllowedContext);
// Expression
if (nextKeyScalar is ExpressionToken)
{
hasExpressionKey = true;
// Legal
if (definition.AllowedContext.Length > 0)
{
m_memory.AddBytes(nextKeyScalar);
var anyDefinition = new DefinitionInfo(definition, TemplateConstants.Any);
mapping.Add(nextKeyScalar, ReadValue(anyDefinition));
}
// Illegal
else
{
m_context.Error(nextKeyScalar, TemplateStrings.ExpressionNotAllowed());
SkipValue();
}
continue;
}
// Not a string, convert
if (!(nextKeyScalar is StringToken nextKey))
{
nextKey = new StringToken(nextKeyScalar.FileId, nextKeyScalar.Line, nextKeyScalar.Column, nextKeyScalar.ToString());
}
// Duplicate
if (!keys.Add(nextKey.Value))
{
m_context.Error(nextKey, TemplateStrings.ValueAlreadyDefined(nextKey.Value));
SkipValue();
continue;
}
// Well known
if (m_schema.TryMatchKey(mappingDefinitions, nextKey.Value, out String nextValueType))
{
m_memory.AddBytes(nextKey);
var nextValueDefinition = new DefinitionInfo(definition, nextValueType);
var nextValue = ReadValue(nextValueDefinition);
mapping.Add(nextKey, nextValue);
continue;
}
// Loose
if (looseKeyType != null)
{
if (looseKeyDefinition == null)
{
looseKeyDefinition = new DefinitionInfo(definition, looseKeyType);
looseValueDefinition = new DefinitionInfo(definition, looseValueType);
}
Validate(nextKey, looseKeyDefinition.Value);
m_memory.AddBytes(nextKey);
var nextValue = ReadValue(looseValueDefinition.Value);
mapping.Add(nextKey, nextValue);
continue;
}
// Error
m_context.Error(nextKey, TemplateStrings.UnexpectedValue(nextKey.Value));
SkipValue();
}
// Expect exactly one candidate mapping definition to remain; otherwise the one-of could not be resolved
if (mappingDefinitions.Count > 1)
{
var hitCount = new Dictionary<String, Int32>();
foreach (MappingDefinition mapdef in mappingDefinitions)
{
foreach (String key in mapdef.Properties.Keys)
{
if (!hitCount.TryGetValue(key, out Int32 value))
{
hitCount.Add(key, 1);
}
else
{
hitCount[key] = value + 1;
}
}
}
List<String> nonDuplicates = new List<String>();
foreach (String key in hitCount.Keys)
{
if (hitCount[key] == 1)
{
nonDuplicates.Add(key);
}
}
nonDuplicates.Sort();
String listToDeDuplicate = String.Join(", ", nonDuplicates);
m_context.Error(mapping, TemplateStrings.UnableToDetermineOneOf(listToDeDuplicate));
}
else if (mappingDefinitions.Count == 1 && !hasExpressionKey)
{
foreach (var property in mappingDefinitions[0].Properties)
{
if (property.Value.Required)
{
if (!keys.Contains(property.Key))
{
m_context.Error(mapping, $"Required property is missing: {property.Key}");
}
}
}
}
ExpectMappingEnd();
}
private void HandleMappingWithAllLooseProperties(
DefinitionInfo mappingDefinition,
DefinitionInfo keyDefinition,
DefinitionInfo valueDefinition,
MappingToken mapping)
{
TemplateToken nextValue;
var keys = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
while (m_objectReader.AllowLiteral(out LiteralToken rawLiteral))
{
var nextKeyScalar = ParseScalar(rawLiteral, mappingDefinition.AllowedContext);
// Expression
if (nextKeyScalar is ExpressionToken)
{
// Legal
if (mappingDefinition.AllowedContext.Length > 0)
{
m_memory.AddBytes(nextKeyScalar);
nextValue = ReadValue(valueDefinition);
mapping.Add(nextKeyScalar, nextValue);
}
// Illegal
else
{
m_context.Error(nextKeyScalar, TemplateStrings.ExpressionNotAllowed());
SkipValue();
}
continue;
}
// Not a string, convert
if (!(nextKeyScalar is StringToken nextKey))
{
nextKey = new StringToken(nextKeyScalar.FileId, nextKeyScalar.Line, nextKeyScalar.Column, nextKeyScalar.ToString());
}
// Duplicate
if (!keys.Add(nextKey.Value))
{
m_context.Error(nextKey, TemplateStrings.ValueAlreadyDefined(nextKey.Value));
SkipValue();
continue;
}
// Validate
Validate(nextKey, keyDefinition);
m_memory.AddBytes(nextKey);
// Add the pair
nextValue = ReadValue(valueDefinition);
mapping.Add(nextKey, nextValue);
}
ExpectMappingEnd();
}
private void ExpectMappingEnd()
{
if (!m_objectReader.AllowMappingEnd())
{
throw new Exception("Expected mapping end"); // Should never happen
}
}
private void SkipValue(Boolean error = false)
{
m_memory.IncrementEvents();
// Scalar
if (m_objectReader.AllowLiteral(out LiteralToken literal))
{
if (error)
{
m_context.Error(literal, TemplateStrings.UnexpectedValue(literal));
}
return;
}
// Sequence
if (m_objectReader.AllowSequenceStart(out SequenceToken sequence))
{
m_memory.IncrementDepth();
if (error)
{
m_context.Error(sequence, TemplateStrings.UnexpectedSequenceStart());
}
while (!m_objectReader.AllowSequenceEnd())
{
SkipValue();
}
m_memory.DecrementDepth();
return;
}
// Mapping
if (m_objectReader.AllowMappingStart(out MappingToken mapping))
{
m_memory.IncrementDepth();
if (error)
{
m_context.Error(mapping, TemplateStrings.UnexpectedMappingStart());
}
while (!m_objectReader.AllowMappingEnd())
{
SkipValue();
SkipValue();
}
m_memory.DecrementDepth();
return;
}
// Unexpected
throw new InvalidOperationException(TemplateStrings.ExpectedScalarSequenceOrMapping());
}
private void Validate(
StringToken stringToken,
DefinitionInfo definition)
{
var scalar = stringToken as ScalarToken;
Validate(ref scalar, definition);
}
private void Validate(
ref ScalarToken scalar,
DefinitionInfo definition)
{
switch (scalar.Type)
{
case TokenType.Null:
case TokenType.Boolean:
case TokenType.Number:
case TokenType.String:
var literal = scalar as LiteralToken;
// Legal
if (definition.Get<ScalarDefinition>().Any(x => x.IsMatch(literal)))
{
return;
}
// Not a string, convert
if (literal.Type != TokenType.String)
{
literal = new StringToken(literal.FileId, literal.Line, literal.Column, literal.ToString());
// Legal
if (definition.Get<StringDefinition>().Any(x => x.IsMatch(literal)))
{
scalar = literal;
return;
}
}
// Illegal
m_context.Error(literal, TemplateStrings.UnexpectedValue(literal));
break;
case TokenType.BasicExpression:
// Illegal
if (definition.AllowedContext.Length == 0)
{
m_context.Error(scalar, TemplateStrings.ExpressionNotAllowed());
}
break;
default:
m_context.Error(scalar, TemplateStrings.UnexpectedValue(scalar));
break;
}
}
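/// <summary>
/// Splits a string literal into literal and ${{ }} expression segments. A value that is a single expression is
/// returned as that expression; mixed literal/expression content is rewritten into a single format(...) expression.
/// </summary>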
private ScalarToken ParseScalar(
LiteralToken token,
String[] allowedContext)
{
// Not a string
if (token.Type != TokenType.String)
{
return token;
}
// Check if the value is definitely a literal
var raw = token.ToString();
Int32 startExpression;
if (String.IsNullOrEmpty(raw) ||
(startExpression = raw.IndexOf(TemplateConstants.OpenExpression)) < 0) // Doesn't contain ${{
{
return token;
}
// Break the value into segments of LiteralToken and ExpressionToken
var segments = new List<ScalarToken>();
var i = 0;
while (i < raw.Length)
{
// An expression starts here:
if (i == startExpression)
{
// Find the end of the expression - i.e. }}
startExpression = i;
var endExpression = -1;
var inString = false;
for (i += TemplateConstants.OpenExpression.Length; i < raw.Length; i++)
{
if (raw[i] == '\'')
{
inString = !inString; // Note, this handles escaped single quotes gracefully. Ex. 'foo''bar'
}
else if (!inString && raw[i] == '}' && raw[i - 1] == '}')
{
endExpression = i;
i++;
break;
}
}
// Check if not closed
if (endExpression < startExpression)
{
m_context.Error(token, TemplateStrings.ExpressionNotClosed());
return token;
}
// Parse the expression
var rawExpression = raw.Substring(
startExpression + TemplateConstants.OpenExpression.Length,
endExpression - startExpression + 1 - TemplateConstants.OpenExpression.Length - TemplateConstants.CloseExpression.Length);
var expression = ParseExpression(token.Line, token.Column, rawExpression, allowedContext, out Exception ex);
// Check for error
if (ex != null)
{
m_context.Error(token, ex);
return token;
}
// Check if a directive was used when not allowed
if (!String.IsNullOrEmpty(expression.Directive) &&
((startExpression != 0) || (i < raw.Length)))
{
m_context.Error(token, TemplateStrings.DirectiveNotAllowedInline(expression.Directive));
return token;
}
// Add the segment
segments.Add(expression);
// Look for the next expression
startExpression = raw.IndexOf(TemplateConstants.OpenExpression, i);
}
// The next expression is further ahead:
else if (i < startExpression)
{
// Append the segment
AddString(segments, token.Line, token.Column, raw.Substring(i, startExpression - i));
// Adjust the position
i = startExpression;
}
// No remaining expressions:
else
{
AddString(segments, token.Line, token.Column, raw.Substring(i));
break;
}
}
// Check whether the value can be converted back to a literal
// For example, the escaped expression: ${{ '{{ this is a literal }}' }}
if (segments.Count == 1 &&
segments[0] is BasicExpressionToken basicExpression &&
IsExpressionString(basicExpression.Expression, out String str))
{
return new StringToken(m_fileId, token.Line, token.Column, str);
}
// Check if only one segment
if (segments.Count == 1)
{
return segments[0];
}
// Build the new expression, using the format function
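// For example, a value like "Hello ${{ inputs.name }}!" would become: format('Hello {0}!', inputs.name)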
var format = new StringBuilder();
var args = new StringBuilder();
var argIndex = 0;
foreach (var segment in segments)
{
if (segment is StringToken literal)
{
var text = ExpressionUtility.StringEscape(literal.Value) // Escape quotes
.Replace("{", "{{") // Escape braces
.Replace("}", "}}");
format.Append(text);
}
else
{
format.Append("{" + argIndex.ToString(CultureInfo.InvariantCulture) + "}"); // Append formatter
argIndex++;
var expression = segment as BasicExpressionToken;
args.Append(", ");
args.Append(expression.Expression);
}
}
var finalExpression = $"format('{format}'{args})";
if (!ExpressionToken.IsValidExpression(finalExpression, allowedContext, out Exception ex2))
{
m_context.Error(token, ex2);
return token;
}
return new BasicExpressionToken(m_fileId, token.Line, token.Column, finalExpression);
}
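/// <summary>
/// Parses the text between ${{ and }}. Recognizes the "insert" directive and otherwise validates
/// the text as an expression against the allowed context, returning null (with <paramref name="ex"/> set)
/// when the text is empty or invalid.
/// </summary>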
private ExpressionToken ParseExpression(
Int32? line,
Int32? column,
String value,
String[] allowedContext,
out Exception ex)
{
var trimmed = value.Trim();
// Check if the value is empty
if (String.IsNullOrEmpty(trimmed))
{
ex = new ArgumentException(TemplateStrings.ExpectedExpression());
return null;
}
// Try to find a matching directive
List<String> parameters;
if (MatchesDirective(trimmed, TemplateConstants.InsertDirective, 0, out parameters, out ex))
{
return new InsertExpressionToken(m_fileId, line, column);
}
else if (ex != null)
{
return null;
}
// Check if the value is an expression
if (!ExpressionToken.IsValidExpression(trimmed, allowedContext, out ex))
{
return null;
}
// Return the expression
return new BasicExpressionToken(m_fileId, line, column, trimmed);
}
private void AddString(
List<ScalarToken> segments,
Int32? line,
Int32? column,
String value)
{
// If the last segment was a LiteralToken, then append to the last segment
if (segments.Count > 0 && segments[segments.Count - 1] is StringToken lastSegment)
{
segments[segments.Count - 1] = new StringToken(m_fileId, line, column, lastSegment.Value + value);
}
// Otherwise add a new LiteralToken
else
{
segments.Add(new StringToken(m_fileId, line, column, value));
}
}
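/// <summary>
/// Determines whether the trimmed expression matches a directive (for example "insert") followed by
/// exactly the expected number of whitespace-separated parameters. Quoted strings and parenthesized
/// groups are treated as single parameters.
/// </summary>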
private static Boolean MatchesDirective(
String trimmed,
String directive,
Int32 expectedParameters,
out List<String> parameters,
out Exception ex)
{
if (trimmed.StartsWith(directive, StringComparison.Ordinal) &&
(trimmed.Length == directive.Length || Char.IsWhiteSpace(trimmed[directive.Length])))
{
parameters = new List<String>();
var startIndex = directive.Length;
var inString = false;
var parens = 0;
for (var i = startIndex; i < trimmed.Length; i++)
{
var c = trimmed[i];
if (Char.IsWhiteSpace(c) && !inString && parens == 0)
{
if (startIndex < i)
{
parameters.Add(trimmed.Substring(startIndex, i - startIndex));
}
startIndex = i + 1;
}
else if (c == '\'')
{
inString = !inString;
}
else if (c == '(' && !inString)
{
parens++;
}
else if (c == ')' && !inString)
{
parens--;
}
}
if (startIndex < trimmed.Length)
{
parameters.Add(trimmed.Substring(startIndex));
}
if (expectedParameters != parameters.Count)
{
ex = new ArgumentException(TemplateStrings.ExpectedNParametersFollowingDirective(expectedParameters, directive, parameters.Count));
parameters = null;
return false;
}
ex = null;
return true;
}
ex = null;
parameters = null;
return false;
}
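/// <summary>
/// Determines whether the expression is a single single-quoted string literal, and if so returns its
/// unescaped value. For example, "'it''s'" yields "it's", which allows ${{ '...' }} to collapse back
/// into a plain string token.
/// </summary>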
private static Boolean IsExpressionString(
String trimmed,
out String str)
{
var builder = new StringBuilder();
var inString = false;
for (var i = 0; i < trimmed.Length; i++)
{
var c = trimmed[i];
if (c == '\'')
{
inString = !inString;
if (inString && i != 0)
{
builder.Append(c);
}
}
else if (!inString)
{
str = default;
return false;
}
else
{
builder.Append(c);
}
}
str = builder.ToString();
return true;
}
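/// <summary>
/// Resolves a schema definition by name and tracks the allowed expression context, accumulating the
/// parent definition's context for nested reads.
/// </summary>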
private struct DefinitionInfo
{
public DefinitionInfo(
TemplateSchema schema,
String name)
{
m_schema = schema;
// Lookup the definition
Definition = m_schema.GetDefinition(name);
// Record allowed context
AllowedContext = Definition.ReaderContext;
}
public DefinitionInfo(
DefinitionInfo parent,
String name)
{
m_schema = parent.m_schema;
// Lookup the definition
Definition = m_schema.GetDefinition(name);
// Record allowed context
if (Definition.ReaderContext.Length > 0)
{
AllowedContext = new HashSet<String>(parent.AllowedContext.Concat(Definition.ReaderContext), StringComparer.OrdinalIgnoreCase).ToArray();
}
else
{
AllowedContext = parent.AllowedContext;
}
}
public IEnumerable<T> Get<T>()
where T : Definition
{
return m_schema.Get<T>(Definition);
}
private TemplateSchema m_schema;
public Definition Definition;
public String[] AllowedContext;
}
private readonly TemplateContext m_context;
private readonly Int32? m_fileId;
private readonly TemplateMemory m_memory;
private readonly IObjectReader m_objectReader;
private readonly TemplateSchema m_schema;
}
}

View File

@@ -0,0 +1,281 @@
// <auto-generated/>
// *** AUTOMATICALLY GENERATED BY GenResourceClass -- DO NOT EDIT!!! ***
using System;
using System.Diagnostics;
using System.ComponentModel;
using System.Globalization;
using System.Reflection;
using System.Resources;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating {
internal static class TemplateStrings
{
//********************************************************************************************
/// Creates the resource manager instance.
//********************************************************************************************
static TemplateStrings()
{
s_resMgr = new ResourceManager("GitHub.Actions.WorkflowParser.ObjectTemplating.TemplateStrings", typeof(TemplateStrings).GetTypeInfo().Assembly);
}
public static ResourceManager Manager
{
get
{
return s_resMgr;
}
}
//********************************************************************************************
/// Returns a localized string given a resource string name.
//********************************************************************************************
public static String Get(
String resourceName)
{
return s_resMgr.GetString(resourceName, CultureInfo.CurrentUICulture);
}
//********************************************************************************************
/// Returns a localized integer given a resource string name.
//********************************************************************************************
public static int GetInt(
String resourceName)
{
return (int)s_resMgr.GetObject(resourceName, CultureInfo.CurrentUICulture);
}
//********************************************************************************************
/// Returns a localized boolean given a resource string name.
//********************************************************************************************
public static bool GetBool(
String resourceName)
{
return (bool)s_resMgr.GetObject(resourceName, CultureInfo.CurrentUICulture);
}
//********************************************************************************************
/// A little helper function to alleviate some typing associated with loading resources and
/// formatting the strings. In DEBUG builds, it also asserts that the number of format
/// arguments and the length of args match.
//********************************************************************************************
private static String Format( // The formatted resource string.
String resourceName, // The name of the resource.
params Object[] args) // Arguments to format.
{
String resource = Get(resourceName);
#if DEBUG
// Check to make sure that the number of format string arguments matches the number of
// arguments passed in.
int formatArgCount = 0;
bool[] argSeen = new bool[100];
for (int i = 0; i < resource.Length; i++)
{
if (resource[i] == '{')
{
if (i + 1 < resource.Length &&
resource[i + 1] == '{')
{
i++; // Skip the escaped curly braces.
}
else
{
// Move past the curly brace and leading whitespace.
i++;
while (Char.IsWhiteSpace(resource[i]))
{
i++;
}
// Get the argument number.
int length = 0;
while (i + length < resource.Length && Char.IsDigit(resource[i + length]))
{
length++;
}
// Record it if it hasn't already been seen.
int argNumber = int.Parse(resource.Substring(i, length), CultureInfo.InvariantCulture);
if (!argSeen[argNumber])
{
formatArgCount++; // Count it as a formatting argument.
argSeen[argNumber] = true;
}
}
}
}
Debug.Assert(args != null || formatArgCount == 0,
String.Format(CultureInfo.InvariantCulture, "The number of format arguments is {0}, but the args parameter is null.", formatArgCount));
Debug.Assert(args == null || formatArgCount == args.Length,
String.Format(CultureInfo.InvariantCulture, "Coding error using resource \"{0}\": The number of format arguments {1} != number of args {2}",
resourceName, formatArgCount, args != null ? args.Length : 0));
#endif // DEBUG
if (args == null)
{
return resource;
}
// If there are any DateTime structs in the arguments, we need to bracket them
// to make sure they are within the supported range of the current calendar.
for (int i = 0; i < args.Length; i++)
{
// DateTime is a struct, we cannot use the as operator and null check.
if (args[i] is DateTime)
{
DateTime dateTime = (DateTime)args[i];
// We need to fetch the calendar on each Format call since it may change.
// Since we don't have more than one DateTime per resource, do not
// bother to cache this for the duration of the for loop.
Calendar calendar = DateTimeFormatInfo.CurrentInfo.Calendar;
if (dateTime > calendar.MaxSupportedDateTime)
{
args[i] = calendar.MaxSupportedDateTime;
}
else if (dateTime < calendar.MinSupportedDateTime)
{
args[i] = calendar.MinSupportedDateTime;
}
}
}
return String.Format(CultureInfo.CurrentCulture, resource, args);
}
// According to the documentation for the ResourceManager class, it should be sufficient to
// create a single static instance. The following is an excerpt from the 1.1 documentation.
// Using the methods of ResourceManager, a caller can access the resources for a particular
// culture using the GetObject and GetString methods. By default, these methods return the
// resource for the culture determined by the current cultural settings of the thread that made
// the call.
private static ResourceManager s_resMgr;
/// <summary>
/// The expression directive &apos;{0}&apos; is not supported in this context
/// </summary>
public static String DirectiveNotAllowed(object arg0) { return Format("DirectiveNotAllowed", arg0); }
/// <summary>
/// The directive &apos;{0}&apos; is not allowed in this context. Directives are not supported for expressions that are embedded within a string. Directives are only supported when the entire value is an expression.
/// </summary>
public static String DirectiveNotAllowedInline(object arg0) { return Format("DirectiveNotAllowedInline", arg0); }
/// <summary>
/// An expression was expected
/// </summary>
public static String ExpectedExpression() { return Get("ExpectedExpression"); }
/// <summary>
/// Expected a mapping
/// </summary>
public static String ExpectedMapping() { return Get("ExpectedMapping"); }
/// <summary>
/// Exactly {0} parameter(s) were expected following the directive &apos;{1}&apos;. Actual parameter count: {2}
/// </summary>
public static String ExpectedNParametersFollowingDirective(object arg0, object arg1, object arg2) { return Format("ExpectedNParametersFollowingDirective", arg0, arg1, arg2); }
/// <summary>
/// Expected a scalar value
/// </summary>
public static String ExpectedScalar() { return Get("ExpectedScalar"); }
/// <summary>
/// Expected a scalar value, a sequence, or a mapping
/// </summary>
public static String ExpectedScalarSequenceOrMapping() { return Get("ExpectedScalarSequenceOrMapping"); }
/// <summary>
/// Expected a sequence
/// </summary>
public static String ExpectedSequence() { return Get("ExpectedSequence"); }
/// <summary>
/// A template expression is not allowed in this context
/// </summary>
public static String ExpressionNotAllowed() { return Get("ExpressionNotAllowed"); }
/// <summary>
/// The expression is not closed. An unescaped ${{ sequence was found, but the closing }} sequence was not found.
/// </summary>
public static String ExpressionNotClosed() { return Get("ExpressionNotClosed"); }
/// <summary>
/// Error message prefix that indicates the line and column where the error occurred
///
/// (Line: {0}, Col: {1})
/// </summary>
public static String LineColumn(object arg0, object arg1) { return Format("LineColumn", arg0, arg1); }
/// <summary>
/// Maximum object depth exceeded
/// </summary>
public static String MaxObjectDepthExceeded() { return Get("MaxObjectDepthExceeded"); }
/// <summary>
/// Maximum object size exceeded
/// </summary>
public static String MaxObjectSizeExceeded() { return Get("MaxObjectSizeExceeded"); }
/// <summary>
/// Maximum events exceeded while evaluating the template. This may indicate an infinite loop or too many nested loops.
/// </summary>
public static String MaxTemplateEventsExceeded() { return Get("MaxTemplateEventsExceeded"); }
/// <summary>
/// The template is not valid.
/// </summary>
public static String TemplateNotValid() { return Get("TemplateNotValid"); }
/// <summary>
/// The template is not valid. {0}
/// </summary>
public static String TemplateNotValidWithErrors(object arg0) { return Format("TemplateNotValidWithErrors", arg0); }
/// <summary>
/// In {0} {1}: Error from called workflow
/// </summary>
public static String CalledWorkflowNotValidWithErrors(object arg0, object arg1) { return Format("CalledWorkflowNotValidWithErrors", arg0, arg1); }
/// <summary>
/// Unable to convert the object to a template token. Actual type &apos;{0}&apos;
/// </summary>
public static String UnableToConvertToTemplateToken(object arg0) { return Format("UnableToConvertToTemplateToken", arg0); }
/// <summary>
/// There&apos;s not enough info to determine what you meant. Add one of these properties: {0}
/// </summary>
public static String UnableToDetermineOneOf(object arg0) { return Format("UnableToDetermineOneOf", arg0); }
/// <summary>
/// A mapping was not expected
/// </summary>
public static String UnexpectedMappingStart() { return Get("UnexpectedMappingStart"); }
/// <summary>
/// A sequence was not expected
/// </summary>
public static String UnexpectedSequenceStart() { return Get("UnexpectedSequenceStart"); }
/// <summary>
/// Unexpected value &apos;{0}&apos;
/// </summary>
public static String UnexpectedValue(object arg0) { return Format("UnexpectedValue", arg0); }
/// <summary>
/// &apos;{0}&apos; is already defined
/// </summary>
public static String ValueAlreadyDefined(object arg0) { return Format("ValueAlreadyDefined", arg0); }
}
} // namespace

View File

@@ -0,0 +1,190 @@
<?xml version="1.0" encoding="utf-8"?>
<root>
<!--
Microsoft ResX Schema
Version 2.0
The primary goals of this format is to allow a simple XML format
that is mostly human readable. The generation and parsing of the
various data types are done through the TypeConverter classes
associated with the data types.
Example:
... ado.net/XML headers & schema ...
<resheader name="resmimetype">text/microsoft-resx</resheader>
<resheader name="version">2.0</resheader>
<resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
<resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
<data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
<data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
<data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
<value>[base64 mime encoded serialized .NET Framework object]</value>
</data>
<data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
<value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
<comment>This is a comment</comment>
</data>
There are any number of "resheader" rows that contain simple
name/value pairs.
Each data row contains a name, and value. The row also contains a
type or mimetype. Type corresponds to a .NET class that support
text/value conversion through the TypeConverter architecture.
Classes that don't support this are serialized and stored with the
mimetype set.
The mimetype is used for serialized objects, and tells the
ResXResourceReader how to depersist the object. This is currently not
extensible. For a given mimetype the value must be set accordingly:
Note - application/x-microsoft.net.object.binary.base64 is the format
that the ResXResourceWriter will generate, however the reader can
read any of the formats listed below.
mimetype: application/x-microsoft.net.object.binary.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.soap.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Soap.SoapFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.bytearray.base64
value : The object must be serialized into a byte array
: using a System.ComponentModel.TypeConverter
: and then encoded with base64 encoding.
-->
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
<xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
<xsd:element name="root" msdata:IsDataSet="true">
<xsd:complexType>
<xsd:choice maxOccurs="unbounded">
<xsd:element name="metadata">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" />
</xsd:sequence>
<xsd:attribute name="name" use="required" type="xsd:string" />
<xsd:attribute name="type" type="xsd:string" />
<xsd:attribute name="mimetype" type="xsd:string" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="assembly">
<xsd:complexType>
<xsd:attribute name="alias" type="xsd:string" />
<xsd:attribute name="name" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="data">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="resheader">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" />
</xsd:complexType>
</xsd:element>
</xsd:choice>
</xsd:complexType>
</xsd:element>
</xsd:schema>
<resheader name="resmimetype">
<value>text/microsoft-resx</value>
</resheader>
<resheader name="version">
<value>2.0</value>
</resheader>
<resheader name="reader">
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<resheader name="writer">
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<data name="DirectiveNotAllowed" xml:space="preserve">
<value>The expression directive '{0}' is not supported in this context</value>
</data>
<data name="DirectiveNotAllowedInline" xml:space="preserve">
<value>The directive '{0}' is not allowed in this context. Directives are not supported for expressions that are embedded within a string. Directives are only supported when the entire value is an expression.</value>
</data>
<data name="ExpectedExpression" xml:space="preserve">
<value>An expression was expected</value>
</data>
<data name="ExpectedMapping" xml:space="preserve">
<value>Expected a mapping</value>
</data>
<data name="ExpectedNParametersFollowingDirective" xml:space="preserve">
<value>Exactly {0} parameter(s) were expected following the directive '{1}'. Actual parameter count: {2}</value>
</data>
<data name="ExpectedScalar" xml:space="preserve">
<value>Expected a scalar value</value>
</data>
<data name="ExpectedScalarSequenceOrMapping" xml:space="preserve">
<value>Expected a scalar value, a sequence, or a mapping</value>
</data>
<data name="ExpectedSequence" xml:space="preserve">
<value>Expected a sequence</value>
</data>
<data name="ExpressionNotAllowed" xml:space="preserve">
<value>A template expression is not allowed in this context</value>
</data>
<data name="ExpressionNotClosed" xml:space="preserve">
<value>The expression is not closed. An unescaped ${{ sequence was found, but the closing }} sequence was not found.</value>
</data>
<data name="LineColumn" xml:space="preserve">
<value>(Line: {0}, Col: {1})</value>
<comment>Error message prefix that indicates the line and column where the error occurred</comment>
</data>
<data name="MaxObjectDepthExceeded" xml:space="preserve">
<value>Maximum object depth exceeded</value>
</data>
<data name="MaxObjectSizeExceeded" xml:space="preserve">
<value>Maximum object size exceeded</value>
</data>
<data name="MaxTemplateEventsExceeded" xml:space="preserve">
<value>Maximum events exceeded while evaluating the template. This may indicate an infinite loop or too many nested loops.</value>
</data>
<data name="TemplateNotValid" xml:space="preserve">
<value>The template is not valid.</value>
</data>
<data name="TemplateNotValidWithErrors" xml:space="preserve">
<value>The template is not valid. {0}</value>
</data>
<data name="CalledWorkflowNotValidWithErrors" xml:space="preserve">
<value>In {0} {1}: Error from called workflow</value>
</data>
<data name="UnableToConvertToTemplateToken" xml:space="preserve">
<value>Unable to convert the object to a template token. Actual type '{0}'</value>
</data>
<data name="UnableToDetermineOneOf" xml:space="preserve">
<value>There's not enough info to determine what you meant. Add one of these properties: {0}</value>
</data>
<data name="UnexpectedMappingStart" xml:space="preserve">
<value>A mapping was not expected</value>
</data>
<data name="UnexpectedSequenceStart" xml:space="preserve">
<value>A sequence was not expected</value>
</data>
<data name="UnexpectedValue" xml:space="preserve">
<value>Unexpected value '{0}'</value>
</data>
<data name="ValueAlreadyDefined" xml:space="preserve">
<value>'{0}' is already defined</value>
</data>
</root>

File diff suppressed because it is too large

View File

@@ -0,0 +1,60 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
/// <summary>
/// Provides information about an error which occurred during validation.
/// </summary>
[DataContract]
public class TemplateValidationError
{
public TemplateValidationError()
{
}
public TemplateValidationError(String message)
: this(null, message)
{
}
public TemplateValidationError(
String code,
String message)
{
Code = code;
Message = message;
}
[DataMember(Name = "code", EmitDefaultValue = false)]
public String Code
{
get;
set;
}
[DataMember(Name = "Message", EmitDefaultValue = false)]
public String Message
{
get;
set;
}
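/// <summary>
/// Flattens an exception and its inner exceptions (up to 50 levels deep) into one validation error per message.
/// </summary>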
public static IEnumerable<TemplateValidationError> Create(Exception exception)
{
for (int i = 0; i < 50; i++)
{
yield return new TemplateValidationError(exception.Message);
if (exception.InnerException == null)
{
break;
}
exception = exception.InnerException;
}
}
}
}

View File

@@ -0,0 +1,157 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Serialization;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
/// <summary>
/// Provides information about errors which occurred during validation.
/// </summary>
[DataContract]
public sealed class TemplateValidationErrors : IEnumerable<TemplateValidationError>
{
public TemplateValidationErrors()
{
}
public TemplateValidationErrors(
Int32 maxErrors,
Int32 maxMessageLength)
{
m_maxErrors = maxErrors;
m_maxMessageLength = maxMessageLength;
}
public Int32 Count => m_errors.Count;
public void Add(String message)
{
Add(new TemplateValidationError(message));
}
public void Add(Exception ex)
{
Add(null, ex);
}
public void Add(String messagePrefix, Exception ex)
{
for (int i = 0; i < 50; i++)
{
String message = !String.IsNullOrEmpty(messagePrefix) ? $"{messagePrefix} {ex.Message}" : ex.Message;
Add(new TemplateValidationError(message));
if (ex.InnerException == null)
{
break;
}
ex = ex.InnerException;
}
}
public void Add(IEnumerable<TemplateValidationError> errors)
{
foreach (var error in errors)
{
Add(error);
}
}
public void Add(TemplateValidationError error)
{
// Check max errors
if (m_maxErrors <= 0 ||
m_errors.Count < m_maxErrors)
{
// Check max message length
if (m_maxMessageLength > 0 &&
error.Message?.Length > m_maxMessageLength)
{
error = new TemplateValidationError(error.Code, error.Message.Substring(0, m_maxMessageLength) + "[...]");
}
m_errors.Add(error);
}
}
/// <summary>
/// Adds a prefix to the error message at the given index.
/// </summary>
public void PrefixMessage(int index, String prefix)
{
if (index < 0 || index >= m_errors.Count)
{
return;
}
var message = GetMessage(index);
if (!String.IsNullOrEmpty(message))
{
m_errors[index].Message = $"{prefix} {message}";
}
else
{
m_errors[index].Message = $"{prefix}";
}
}
public string GetMessage(int index)
{
if (index < 0 || index >= m_errors.Count)
{
return null;
}
return m_errors[index].Message;
}
/// <summary>
/// Throws <see cref="TemplateValidationException" /> if there are any errors.
/// </summary>
public void Check()
{
if (m_errors.Count > 0)
{
throw new TemplateValidationException(m_errors);
}
}
/// <summary>
/// Throws <see cref="TemplateValidationException" /> if there are any errors.
/// </summary>
/// <param name="prefix">The error message prefix</param>
public void Check(String prefix)
{
if (String.IsNullOrEmpty(prefix))
{
this.Check();
}
else if (m_errors.Count > 0)
{
var message = $"{prefix.Trim()} {String.Join(",", m_errors.Select(e => e.Message))}";
throw new TemplateValidationException(message, m_errors);
}
}
public void Clear()
{
m_errors.Clear();
}
public IEnumerator<TemplateValidationError> GetEnumerator()
{
return (m_errors as IEnumerable<TemplateValidationError>).GetEnumerator();
}
IEnumerator IEnumerable.GetEnumerator()
{
return (m_errors as IEnumerable).GetEnumerator();
}
private readonly List<TemplateValidationError> m_errors = new List<TemplateValidationError>();
private readonly Int32 m_maxErrors;
private readonly Int32 m_maxMessageLength;
}
}

View File

@@ -0,0 +1,57 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Serialization;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
public class TemplateValidationException : Exception
{
public TemplateValidationException()
: this(TemplateStrings.TemplateNotValid())
{
}
public TemplateValidationException(IEnumerable<TemplateValidationError> errors)
: this(TemplateStrings.TemplateNotValidWithErrors(string.Join(",", (errors ?? Enumerable.Empty<TemplateValidationError>()).Select(e => e.Message))))
{
m_errors = new List<TemplateValidationError>(errors ?? Enumerable.Empty<TemplateValidationError>());
}
public TemplateValidationException(
String message,
IEnumerable<TemplateValidationError> errors)
: this(message)
{
m_errors = new List<TemplateValidationError>(errors ?? Enumerable.Empty<TemplateValidationError>());
}
public TemplateValidationException(String message)
: base(message)
{
}
public TemplateValidationException(
String message,
Exception innerException)
: base(message, innerException)
{
}
public IList<TemplateValidationError> Errors
{
get
{
if (m_errors == null)
{
m_errors = new List<TemplateValidationError>();
}
return m_errors;
}
}
private List<TemplateValidationError> m_errors;
}
}

View File

@@ -0,0 +1,74 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating
{
/// <summary>
/// Converts from a TemplateToken into another object format
/// </summary>
internal sealed class TemplateWriter
{
internal static void Write(
IObjectWriter objectWriter,
TemplateToken value)
{
objectWriter.WriteStart();
WriteValue(objectWriter, value);
objectWriter.WriteEnd();
}
private static void WriteValue(
IObjectWriter objectWriter,
TemplateToken value)
{
switch (value?.Type ?? TokenType.Null)
{
case TokenType.Null:
objectWriter.WriteNull();
break;
case TokenType.Boolean:
var booleanToken = value as BooleanToken;
objectWriter.WriteBoolean(booleanToken.Value);
break;
case TokenType.Number:
var numberToken = value as NumberToken;
objectWriter.WriteNumber(numberToken.Value);
break;
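// String literals and expression tokens are written as strings; expressions keep their ${{ ... }} form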
case TokenType.String:
case TokenType.BasicExpression:
case TokenType.InsertExpression:
objectWriter.WriteString(value.ToString());
break;
case TokenType.Mapping:
var mappingToken = value as MappingToken;
objectWriter.WriteMappingStart();
foreach (var pair in mappingToken)
{
WriteValue(objectWriter, pair.Key);
WriteValue(objectWriter, pair.Value);
}
objectWriter.WriteMappingEnd();
break;
case TokenType.Sequence:
var sequenceToken = value as SequenceToken;
objectWriter.WriteSequenceStart();
foreach (var item in sequenceToken)
{
WriteValue(objectWriter, item);
}
objectWriter.WriteSequenceEnd();
break;
default:
throw new NotSupportedException($"Unexpected type '{value.GetType()}'");
}
}
}
}

View File

@@ -0,0 +1,142 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Globalization;
using System.Linq;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions;
using GitHub.Actions.Expressions.Sdk;
using GitHub.Actions.Expressions.Sdk.Functions;
using Container = GitHub.Actions.Expressions.Sdk.Container;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
{
[DataContract]
public sealed class BasicExpressionToken : ExpressionToken
{
public BasicExpressionToken(
Int32? fileId,
Int32? line,
Int32? column,
String expression)
: base(TokenType.BasicExpression, fileId, line, column, null)
{
m_expression = expression;
}
internal String Expression
{
get
{
if (m_expression == null)
{
m_expression = String.Empty;
}
return m_expression;
}
}
public override TemplateToken Clone(Boolean omitSource)
{
return omitSource ? new BasicExpressionToken(null, null, null, m_expression) : new BasicExpressionToken(FileId, Line, Column, m_expression);
}
public override String ToString()
{
return $"{TemplateConstants.OpenExpression} {m_expression} {TemplateConstants.CloseExpression}";
}
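/// <summary>
/// Produces a short display string. When the expression is a format() call whose first parameter is a
/// literal string, the literal is rendered with each remaining argument shown as an inline ${{ }} expression,
/// e.g. format('Run {0}', matrix.node) displays as "Run ${{ matrix.node }}".
/// </summary>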
public override String ToDisplayString()
{
var expressionParser = new ExpressionParser();
var expressionNode = expressionParser.ValidateSyntax(Expression, null);
if (expressionNode is Format formatNode)
{
// Make sure our first item is indeed a literal string so we can format it.
if (formatNode.Parameters.Count > 1 &&
formatNode.Parameters.First() is Literal literalValueNode &&
literalValueNode.Kind == ValueKind.String)
{
// Get all other parameters, excluding the format string, to pass into the formatter
var formatParameters = formatNode.Parameters.Skip(1).Select(x => this.ConvertFormatParameterToExpression(x)).ToArray();
if (formatParameters.Length > 0)
{
String formattedString = String.Empty;
try
{
formattedString = String.Format(CultureInfo.InvariantCulture, (formatNode.Parameters[0] as Literal).Value as String, formatParameters);
}
catch (FormatException) { }
catch (ArgumentNullException) { } // If this operation fails, revert to default display name
if (!String.IsNullOrEmpty(formattedString))
{
return TrimDisplayString(formattedString);
}
}
}
}
return base.ToDisplayString();
}
internal StringToken EvaluateStringToken(
TemplateContext context,
out Int32 bytes)
{
return EvaluateStringToken(context, Expression, out bytes);
}
internal MappingToken EvaluateMappingToken(
TemplateContext context,
out Int32 bytes)
{
return EvaluateMappingToken(context, Expression, out bytes);
}
internal SequenceToken EvaluateSequenceToken(
TemplateContext context,
out Int32 bytes)
{
return EvaluateSequenceToken(context, Expression, out bytes);
}
internal TemplateToken EvaluateTemplateToken(
TemplateContext context,
out Int32 bytes)
{
return EvaluateTemplateToken(context, Expression, out bytes);
}
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_expression?.Length == 0)
{
m_expression = null;
}
}
private String ConvertFormatParameterToExpression(ExpressionNode node)
{
var nodeString = node.ConvertToExpression();
// If the node is a container, see if it starts with '(' and ends with ')' so we can simplify the string.
// Only simplify when exactly one '(' and one ')' exist in the string.
// We are trying to simplify the case (a || b) to a || b,
// but we should avoid simplifying cases like (a || b) && (c || d), where the parentheses are required.
if (node is Container &&
nodeString.Length > 2 &&
nodeString[0] == ExpressionConstants.StartParameter &&
nodeString[nodeString.Length - 1] == ExpressionConstants.EndParameter &&
nodeString.Count(character => character == ExpressionConstants.StartParameter) == 1 &&
nodeString.Count(character => character == ExpressionConstants.EndParameter) == 1)
{
nodeString = nodeString.Substring(1, nodeString.Length - 2);
}
return String.Concat(TemplateConstants.OpenExpression, " ", nodeString, " ", TemplateConstants.CloseExpression);
}
[DataMember(Name = "expr", EmitDefaultValue = false)]
private String m_expression;
}
}

View File

@@ -0,0 +1,40 @@
using System;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions.Sdk;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
{
[DataContract]
public sealed class BooleanToken : LiteralToken, IBoolean
{
public BooleanToken(
Int32? fileId,
Int32? line,
Int32? column,
Boolean value)
: base(TokenType.Boolean, fileId, line, column)
{
m_value = value;
}
public Boolean Value => m_value;
public override TemplateToken Clone(Boolean omitSource)
{
return omitSource ? new BooleanToken(null, null, null, m_value) : new BooleanToken(FileId, Line, Column, m_value);
}
public override String ToString()
{
return m_value ? "true" : "false";
}
Boolean IBoolean.GetBoolean()
{
return Value;
}
[DataMember(Name = "bool", EmitDefaultValue = false)]
private Boolean m_value;
}
}

View File

@@ -0,0 +1,95 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Runtime.Serialization;
using System.Text.RegularExpressions;
using GitHub.Actions.Expressions;
using GitHub.Actions.Expressions.Sdk;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
{
/// <summary>
/// Base class for all template expression tokens
/// </summary>
[DataContract]
public abstract class ExpressionToken : ScalarToken
{
internal ExpressionToken(
Int32 templateType,
Int32? fileId,
Int32? line,
Int32? column,
String directive)
: base(templateType, fileId, line, column)
{
Directive = directive;
}
[DataMember(Name = "directive", EmitDefaultValue = false)]
internal String Directive { get; }
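/// <summary>
/// Validates expression syntax against the allowed context. Each allowed-context entry is either a named
/// value (e.g. "github") or a function signature of the form "name(min,max)", where max may be "MAX" for
/// an unbounded parameter count.
/// </summary>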
internal static Boolean IsValidExpression(
String expression,
String[] allowedContext,
out Exception ex)
{
// Create dummy named values and functions
var namedValues = new List<INamedValueInfo>();
var functions = new List<IFunctionInfo>();
if (allowedContext?.Length > 0)
{
foreach (var contextItem in allowedContext)
{
var match = s_function.Match(contextItem);
if (match.Success)
{
var functionName = match.Groups[1].Value;
var minParameters = Int32.Parse(match.Groups[2].Value, NumberStyles.None, CultureInfo.InvariantCulture);
var maxParametersRaw = match.Groups[3].Value;
var maxParameters = String.Equals(maxParametersRaw, TemplateConstants.MaxConstant, StringComparison.Ordinal)
? Int32.MaxValue
: Int32.Parse(maxParametersRaw, NumberStyles.None, CultureInfo.InvariantCulture);
functions.Add(new FunctionInfo<DummyFunction>(functionName, minParameters, maxParameters));
}
else
{
namedValues.Add(new NamedValueInfo<ContextValueNode>(contextItem));
}
}
}
// Parse
Boolean result;
ExpressionNode root = null;
try
{
root = new ExpressionParser().CreateTree(expression, null, namedValues, functions) as ExpressionNode;
result = true;
ex = null;
}
catch (Exception exception)
{
result = false;
ex = exception;
}
return result;
}
private sealed class DummyFunction : Function
{
protected override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
{
resultMemory = null;
return null;
}
}
private static readonly Regex s_function = new Regex(@"^([a-zA-Z0-9_]+)\(([0-9]+),([0-9]+|MAX)\)$", RegexOptions.Compiled);
}
}

View File

@@ -0,0 +1,27 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
{
[DataContract]
public sealed class InsertExpressionToken : ExpressionToken
{
public InsertExpressionToken(
Int32? fileId,
Int32? line,
Int32? column)
: base(TokenType.InsertExpression, fileId, line, column, TemplateConstants.InsertDirective)
{
}
public override TemplateToken Clone(Boolean omitSource)
{
return omitSource ? new InsertExpressionToken(null, null, null) : new InsertExpressionToken(FileId, Line, Column);
}
public override String ToString()
{
return $"{TemplateConstants.OpenExpression} insert {TemplateConstants.CloseExpression}";
}
}
}

View File

@@ -0,0 +1,18 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
{
[DataContract]
public abstract class LiteralToken : ScalarToken
{
public LiteralToken(
Int32 tokenType,
Int32? fileId,
Int32? line,
Int32? column)
: base(tokenType, fileId, line, column)
{
}
}
}

View File

@@ -0,0 +1,245 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Runtime.Serialization;
using System.Threading;
using GitHub.Actions.Expressions.Sdk;
using Newtonsoft.Json;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
{
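/// <summary>
/// An ordered collection of key/value pairs. For expression evaluation it is also exposed as a
/// case-insensitive read-only object keyed by string; when duplicate string keys exist, the first occurrence wins.
/// </summary>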
[DataContract]
[JsonObject]
public sealed class MappingToken : TemplateToken, IEnumerable<KeyValuePair<ScalarToken, TemplateToken>>, IReadOnlyObject
{
public MappingToken(
Int32? fileId,
Int32? line,
Int32? column)
: base(TokenType.Mapping, fileId, line, column)
{
}
internal Int32 Count => m_items?.Count ?? 0;
// IReadOnlyObject (for expressions)
Int32 IReadOnlyObject.Count
{
get
{
InitializeDictionary();
return m_dictionary.Count;
}
}
// IReadOnlyObject (for expressions)
IEnumerable<String> IReadOnlyObject.Keys
{
get
{
InitializeDictionary();
foreach (var key in m_dictionary.Keys)
{
yield return key as String;
}
}
}
// IReadOnlyObject (for expressions)
IEnumerable<Object> IReadOnlyObject.Values
{
get
{
InitializeDictionary();
foreach (var value in m_dictionary.Values)
{
yield return value;
}
}
}
public KeyValuePair<ScalarToken, TemplateToken> this[Int32 index]
{
get
{
return m_items[index];
}
set
{
m_items[index] = value;
m_dictionary = null;
}
}
// IReadOnlyObject (for expressions)
Object IReadOnlyObject.this[String key]
{
get
{
InitializeDictionary();
return m_dictionary[key];
}
}
public void Add(IEnumerable<KeyValuePair<ScalarToken, TemplateToken>> items)
{
foreach (var item in items)
{
Add(item);
}
}
public void Add(KeyValuePair<ScalarToken, TemplateToken> item)
{
if (m_items == null)
{
m_items = new List<KeyValuePair<ScalarToken, TemplateToken>>();
}
m_items.Add(item);
m_dictionary = null;
}
public void Add(
ScalarToken key,
TemplateToken value)
{
Add(new KeyValuePair<ScalarToken, TemplateToken>(key, value));
}
public override TemplateToken Clone(Boolean omitSource)
{
var result = omitSource ? new MappingToken(null, null, null) : new MappingToken(FileId, Line, Column);
if (m_items?.Count > 0)
{
foreach (var pair in m_items)
{
result.Add(pair.Key?.Clone(omitSource) as ScalarToken, pair.Value?.Clone(omitSource));
}
}
return result;
}
public IEnumerator<KeyValuePair<ScalarToken, TemplateToken>> GetEnumerator()
{
if (m_items?.Count > 0)
{
return m_items.GetEnumerator();
}
else
{
return (new List<KeyValuePair<ScalarToken, TemplateToken>>(0)).GetEnumerator();
}
}
System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
{
if (m_items?.Count > 0)
{
return m_items.GetEnumerator();
}
else
{
return (new KeyValuePair<ScalarToken, TemplateToken>[0]).GetEnumerator();
}
}
public void Insert(
Int32 index,
KeyValuePair<ScalarToken, TemplateToken> item)
{
if (m_items == null)
{
m_items = new List<KeyValuePair<ScalarToken, TemplateToken>>();
}
m_items.Insert(index, item);
m_dictionary = null;
}
public void Insert(
Int32 index,
ScalarToken key,
TemplateToken value)
{
Insert(index, new KeyValuePair<ScalarToken, TemplateToken>(key, value));
}
public void RemoveAt(Int32 index)
{
m_items.RemoveAt(index);
m_dictionary = null;
}
// IReadOnlyObject (for expressions)
Boolean IReadOnlyObject.ContainsKey(String key)
{
InitializeDictionary();
return m_dictionary.Contains(key);
}
// IReadOnlyObject (for expressions)
IEnumerator IReadOnlyObject.GetEnumerator()
{
InitializeDictionary();
return m_dictionary.GetEnumerator();
}
// IReadOnlyObject (for expressions)
Boolean IReadOnlyObject.TryGetValue(
String key,
out Object value)
{
InitializeDictionary();
if (!m_dictionary.Contains(key))
{
value = null;
return false;
}
value = m_dictionary[key];
return true;
}
/// <summary>
/// Initializes the dictionary used for the expressions IReadOnlyObject interface
/// </summary>
private void InitializeDictionary()
{
if (m_dictionary == null)
{
var dictionary = new OrderedDictionary(StringComparer.OrdinalIgnoreCase);
if (m_items?.Count > 0)
{
foreach (var pair in m_items)
{
if (pair.Key is StringToken stringToken &&
!dictionary.Contains(stringToken.Value))
{
dictionary.Add(stringToken.Value, pair.Value);
}
}
}
Interlocked.CompareExchange(ref m_dictionary, dictionary, null);
}
}
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_items?.Count == 0)
{
m_items = null;
}
}
[DataMember(Name = "map", EmitDefaultValue = false)]
private List<KeyValuePair<ScalarToken, TemplateToken>> m_items;
private IDictionary m_dictionary;
}
}

View File

@@ -0,0 +1,28 @@
using System;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions.Sdk;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
{
[DataContract]
public sealed class NullToken : LiteralToken, INull
{
public NullToken(
Int32? fileId,
Int32? line,
Int32? column)
: base(TokenType.Null, fileId, line, column)
{
}
public override TemplateToken Clone(Boolean omitSource)
{
return omitSource ? new NullToken(null, null, null) : new NullToken(FileId, Line, Column);
}
public override String ToString()
{
return String.Empty;
}
}
}

View File

@@ -0,0 +1,41 @@
using System;
using System.Globalization;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions.Sdk;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
{
[DataContract]
public sealed class NumberToken : LiteralToken, INumber
{
public NumberToken(
Int32? fileId,
Int32? line,
Int32? column,
Double value)
: base(TokenType.Number, fileId, line, column)
{
m_value = value;
}
public Double Value => m_value;
public override TemplateToken Clone(Boolean omitSource)
{
return omitSource ? new NumberToken(null, null, null, m_value) : new NumberToken(FileId, Line, Column, m_value);
}
public override String ToString()
{
return m_value.ToString("G15", CultureInfo.InvariantCulture);
}
Double INumber.GetNumber()
{
return Value;
}
[DataMember(Name = "num", EmitDefaultValue = false)]
private Double m_value;
}
}

View File

@@ -0,0 +1,34 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
{
public abstract class ScalarToken : TemplateToken
{
protected ScalarToken(
Int32 type,
Int32? fileId,
Int32? line,
Int32? column)
: base(type, fileId, line, column)
{
}
public virtual String ToDisplayString()
{
return TrimDisplayString(ToString());
}
protected String TrimDisplayString(String displayString)
{
var firstLine = displayString.TrimStart(' ', '\t', '\r', '\n');
var firstNewLine = firstLine.IndexOfAny(new[] { '\r', '\n' });
if (firstNewLine >= 0)
{
firstLine = firstLine.Substring(0, firstNewLine);
}
return firstLine;
}
}
}

View File

@@ -0,0 +1,149 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions.Sdk;
using Newtonsoft.Json;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
{
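/// <summary>
/// An ordered list of template tokens, exposed to expressions as a read-only array.
/// </summary>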
[DataContract]
[JsonObject]
public sealed class SequenceToken : TemplateToken, IEnumerable<TemplateToken>, IReadOnlyArray
{
public SequenceToken(
Int32? fileId,
Int32? line,
Int32? column)
: base(TokenType.Sequence, fileId, line, column)
{
}
public Int32 Count => m_items?.Count ?? 0;
public TemplateToken this[Int32 index]
{
get
{
return m_items[index];
}
set
{
m_items[index] = value;
}
}
// IReadOnlyArray (for expressions)
Object IReadOnlyArray.this[Int32 index]
{
get
{
return m_items[index];
}
}
public void Add(TemplateToken value)
{
if (m_items == null)
{
m_items = new List<TemplateToken>();
}
m_items.Add(value);
}
public override TemplateToken Clone(Boolean omitSource)
{
var result = omitSource ? new SequenceToken(null, null, null) : new SequenceToken(FileId, Line, Column);
if (m_items?.Count > 0)
{
foreach (var item in m_items)
{
result.Add(item?.Clone(omitSource));
}
}
return result;
}
public IEnumerator<TemplateToken> GetEnumerator()
{
if (m_items?.Count > 0)
{
return m_items.GetEnumerator();
}
else
{
return (new TemplateToken[0] as IEnumerable<TemplateToken>).GetEnumerator();
}
}
IEnumerator IEnumerable.GetEnumerator()
{
if (m_items?.Count > 0)
{
return m_items.GetEnumerator();
}
else
{
return (new TemplateToken[0] as IEnumerable<TemplateToken>).GetEnumerator();
}
}
// IReadOnlyArray (for expressions)
IEnumerator IReadOnlyArray.GetEnumerator()
{
if (m_items?.Count > 0)
{
return m_items.GetEnumerator();
}
else
{
return (new TemplateToken[0] as IEnumerable<TemplateToken>).GetEnumerator();
}
}
public void Insert(
Int32 index,
TemplateToken item)
{
if (m_items == null)
{
m_items = new List<TemplateToken>();
}
m_items.Insert(index, item);
}
public void InsertRange(
Int32 index,
IEnumerable<TemplateToken> items)
{
if (m_items == null)
{
m_items = new List<TemplateToken>();
}
m_items.InsertRange(index, items);
}
public void RemoveAt(Int32 index)
{
m_items.RemoveAt(index);
}
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_items?.Count == 0)
{
m_items = null;
}
}
[DataMember(Name = "seq", EmitDefaultValue = false)]
private List<TemplateToken> m_items;
}
}

View File

@@ -0,0 +1,62 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions.Sdk;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
{
[DataContract]
public sealed class StringToken : LiteralToken, IString
{
public StringToken(
Int32? fileId,
Int32? line,
Int32? column,
String value)
: base(TokenType.String, fileId, line, column)
{
m_value = value;
}
public String Value
{
get
{
if (m_value == null)
{
m_value = String.Empty;
}
return m_value;
}
}
public override TemplateToken Clone(Boolean omitSource)
{
return omitSource ? new StringToken(null, null, null, m_value) : new StringToken(FileId, Line, Column, m_value);
}
public override String ToString()
{
return m_value ?? String.Empty;
}
String IString.GetString()
{
return Value;
}
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_value?.Length == 0)
{
m_value = null;
}
}
[DataMember(Name = "lit", EmitDefaultValue = false)]
private String m_value;
}
}

View File

@@ -0,0 +1,292 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions;
using GitHub.Actions.Expressions.Sdk;
using Newtonsoft.Json;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
{
/// <summary>
/// Base class for all template tokens
/// </summary>
[DataContract]
[JsonConverter(typeof(TemplateTokenJsonConverter))]
public abstract class TemplateToken
{
protected TemplateToken(
Int32 type,
Int32? fileId,
Int32? line,
Int32? column)
{
Type = type;
FileId = fileId;
Line = line;
Column = column;
}
[DataMember(Name = "file", EmitDefaultValue = false)]
internal Int32? FileId { get; private set; }
[DataMember(Name = "line", EmitDefaultValue = false)]
internal Int32? Line { get; private set; }
[DataMember(Name = "col", EmitDefaultValue = false)]
internal Int32? Column { get; private set; }
[DataMember(Name = "type", EmitDefaultValue = false)]
internal Int32 Type { get; }
public TemplateToken Clone()
{
return Clone(false);
}
public abstract TemplateToken Clone(Boolean omitSource);
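/// <summary>
/// Evaluates the expression and coerces the result to a string token. If the result is not a primitive
/// value, an error is recorded and the raw expression text is returned as the string value.
/// </summary>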
protected StringToken EvaluateStringToken(
TemplateContext context,
String expression,
out Int32 bytes)
{
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
StrictJsonParsing = context.StrictJsonParsing,
};
var result = tree.Evaluate(context.TraceWriter.ToExpressionTraceWriter(), null, context, options);
if (result.Raw is LiteralToken literalToken)
{
var stringToken = new StringToken(FileId, Line, Column, literalToken.ToString());
context.Memory.AddBytes(stringToken);
return stringToken;
}
if (!result.IsPrimitive)
{
context.Error(this, "Expected a string");
return CreateStringToken(context, expression);
}
var stringValue = result.Kind == ValueKind.Null ? String.Empty : result.ConvertToString();
return CreateStringToken(context, stringValue);
}
finally
{
bytes = context.Memory.CurrentBytes - originalBytes;
}
}
protected SequenceToken EvaluateSequenceToken(
TemplateContext context,
String expression,
out Int32 bytes)
{
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
StrictJsonParsing = context.StrictJsonParsing,
};
var result = tree.Evaluate(context.TraceWriter.ToExpressionTraceWriter(), null, context, options);
var templateToken = ConvertToTemplateToken(context, result);
if (templateToken is SequenceToken sequence)
{
return sequence;
}
context.Error(this, TemplateStrings.ExpectedSequence());
return CreateSequenceToken(context);
}
finally
{
bytes = context.Memory.CurrentBytes - originalBytes;
}
}
protected MappingToken EvaluateMappingToken(
TemplateContext context,
String expression,
out Int32 bytes)
{
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
StrictJsonParsing = context.StrictJsonParsing,
};
var result = tree.Evaluate(context.TraceWriter.ToExpressionTraceWriter(), null, context, options);
var templateToken = ConvertToTemplateToken(context, result);
if (templateToken is MappingToken mapping)
{
return mapping;
}
context.Error(this, TemplateStrings.ExpectedMapping());
return CreateMappingToken(context);
}
finally
{
bytes = context.Memory.CurrentBytes - originalBytes;
}
}
protected TemplateToken EvaluateTemplateToken(
TemplateContext context,
String expression,
out Int32 bytes)
{
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
StrictJsonParsing = context.StrictJsonParsing,
};
var result = tree.Evaluate(context.TraceWriter.ToExpressionTraceWriter(), null, context, options);
return ConvertToTemplateToken(context, result);
}
finally
{
bytes = context.Memory.CurrentBytes - originalBytes;
}
}
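/// <summary>
/// Converts an expression evaluation result into a template token, passing through raw literal, sequence,
/// and mapping tokens and otherwise walking the result via the expression SDK's read-only collection interfaces.
/// </summary>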
private TemplateToken ConvertToTemplateToken(
TemplateContext context,
EvaluationResult result)
{
// Literal
if (TryConvertToLiteralToken(context, result, out LiteralToken literal))
{
return literal;
}
// Known raw types
else if (!Object.ReferenceEquals(result.Raw, null))
{
if (result.Raw is SequenceToken sequence)
{
context.Memory.AddBytes(sequence, true);
return sequence;
}
else if (result.Raw is MappingToken mapping)
{
context.Memory.AddBytes(mapping, true);
return mapping;
}
}
// Leverage the expression SDK to traverse the object
if (result.TryGetCollectionInterface(out Object collection))
{
if (collection is IReadOnlyObject dictionary)
{
var mapping = CreateMappingToken(context);
foreach (KeyValuePair<String, Object> pair in dictionary)
{
var keyToken = CreateStringToken(context, pair.Key);
var valueResult = EvaluationResult.CreateIntermediateResult(null, pair.Value);
var valueToken = ConvertToTemplateToken(context, valueResult);
mapping.Add(keyToken, valueToken);
}
return mapping;
}
else if (collection is IReadOnlyArray list)
{
var sequence = CreateSequenceToken(context);
foreach (var item in list)
{
var itemResult = EvaluationResult.CreateIntermediateResult(null, item);
var itemToken = ConvertToTemplateToken(context, itemResult);
sequence.Add(itemToken);
}
return sequence;
}
}
throw new ArgumentException(TemplateStrings.UnableToConvertToTemplateToken(result.Value?.GetType().FullName));
}
private Boolean TryConvertToLiteralToken(
TemplateContext context,
EvaluationResult result,
out LiteralToken literal)
{
if (result.Raw is LiteralToken literal2)
{
context.Memory.AddBytes(literal2);
literal = literal2;
return true;
}
switch (result.Kind)
{
case ValueKind.Null:
literal = new NullToken(FileId, Line, Column);
break;
case ValueKind.Boolean:
literal = new BooleanToken(FileId, Line, Column, (Boolean)result.Value);
break;
case ValueKind.Number:
literal = new NumberToken(FileId, Line, Column, (Double)result.Value);
break;
case ValueKind.String:
literal = new StringToken(FileId, Line, Column, (String)result.Value);
break;
default:
literal = null;
return false;
}
context.Memory.AddBytes(literal);
return true;
}
private StringToken CreateStringToken(
TemplateContext context,
String value)
{
var result = new StringToken(FileId, Line, Column, value);
context.Memory.AddBytes(result);
return result;
}
private SequenceToken CreateSequenceToken(TemplateContext context)
{
var result = new SequenceToken(FileId, Line, Column);
context.Memory.AddBytes(result);
return result;
}
private MappingToken CreateMappingToken(TemplateContext context)
{
var result = new MappingToken(FileId, Line, Column);
context.Memory.AddBytes(result);
return result;
}
}
}

View File

@@ -0,0 +1,291 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Linq;
using GitHub.Actions.Expressions;
using GitHub.Actions.Expressions.Sdk;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
{
internal static class TemplateTokenExtensions
{
internal static BooleanToken AssertBoolean(
this TemplateToken value,
string objectDescription)
{
if (value is BooleanToken booleanToken)
{
return booleanToken;
}
throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(BooleanToken)}' was expected.");
}
internal static NullToken AssertNull(
this TemplateToken value,
string objectDescription)
{
if (value is NullToken nullToken)
{
return nullToken;
}
throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(NullToken)}' was expected.");
}
internal static NumberToken AssertNumber(
this TemplateToken value,
string objectDescription)
{
if (value is NumberToken numberToken)
{
return numberToken;
}
throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(NumberToken)}' was expected.");
}
internal static StringToken AssertString(
this TemplateToken value,
string objectDescription)
{
if (value is StringToken stringToken)
{
return stringToken;
}
throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(StringToken)}' was expected.");
}
internal static MappingToken AssertMapping(
this TemplateToken value,
string objectDescription)
{
if (value is MappingToken mapping)
{
return mapping;
}
throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(MappingToken)}' was expected.");
}
internal static ScalarToken AssertScalar(
this TemplateToken value,
string objectDescription)
{
if (value is ScalarToken scalar)
{
return scalar;
}
throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(ScalarToken)}' was expected.");
}
internal static SequenceToken AssertSequence(
this TemplateToken value,
string objectDescription)
{
if (value is SequenceToken sequence)
{
return sequence;
}
throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(SequenceToken)}' was expected.");
}
internal static void AssertUnexpectedValue(
this LiteralToken literal,
string objectDescription)
{
throw new ArgumentException($"Error while reading '{objectDescription}'. Unexpected value '{literal.ToString()}'");
}
/// <summary>
/// Traverses the token and checks whether all required expression values
/// and functions are provided.
/// </summary>
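/// <example>
/// For illustration (hypothetical values): a token containing the expression
/// "github.sha" requires a named value "github"; if no value named "github" is
/// present in <paramref name="expressionValues"/>, this method returns false.
/// Similarly, a call to a function not in ExpressionConstants.WellKnownFunctions
/// must appear in <paramref name="expressionFunctions"/>.
/// </example>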
public static bool CheckHasRequiredContext(
this TemplateToken token,
IReadOnlyObject expressionValues,
IList<IFunctionInfo> expressionFunctions)
{
var expressionTokens = token.Traverse()
.OfType<BasicExpressionToken>()
.ToArray();
var parser = new ExpressionParser();
foreach (var expressionToken in expressionTokens)
{
var tree = parser.ValidateSyntax(expressionToken.Expression, null);
foreach (var node in tree.Traverse())
{
if (node is NamedValue namedValue)
{
if (expressionValues?.Keys.Any(x => string.Equals(x, namedValue.Name, StringComparison.OrdinalIgnoreCase)) != true)
{
return false;
}
}
else if (node is Function function &&
!ExpressionConstants.WellKnownFunctions.ContainsKey(function.Name) &&
expressionFunctions?.Any(x => string.Equals(x.Name, function.Name, StringComparison.OrdinalIgnoreCase)) != true)
{
return false;
}
}
}
return true;
}
/// <summary>
/// Traverses each token that is provided (including descendants) and
/// checks whether specific contexts or sub-properties of contexts are referenced.
/// If a conclusive determination cannot be made, then the pattern is considered matched.
/// For example, the expression "toJson(github)" matches the pattern "github.event" because
/// the value is passed to a function. Not enough information is known to determine whether
/// the function requires the sub-property. Therefore, it is assumed that it may.
///
/// Wildcards are supported in the pattern, and are treated as matching any literal.
/// For example, the expression "needs.my-job.outputs.my-output" matches the pattern "needs.*.outputs".
/// </summary>
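/// <example>
/// Illustrative call (the token contents and patterns here are hypothetical):
/// <code>
/// // tokens contain the expression "needs.build.outputs.version"
/// var referenced = tokens.CheckReferencesContext("needs.*.outputs", "secrets");
/// // referenced[0] == true, referenced[1] == false
/// </code>
/// The result array positions correspond to the supplied patterns.
/// </example>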
public static bool[] CheckReferencesContext(
this IList<TemplateToken> tokens,
params string[] patterns)
{
var result = new bool[patterns.Length];
var expressionTokens = tokens
.SelectMany(x => x.Traverse())
.OfType<BasicExpressionToken>()
.ToArray();
var parser = new ExpressionParser();
foreach (var expressionToken in expressionTokens)
{
var tree = parser.ValidateSyntax(expressionToken.Expression, null);
var isReferenced = tree.CheckReferencesContext(patterns);
for (var i = 0; i < patterns.Length; i++)
{
if (isReferenced[i])
{
result[i] = true;
}
}
}
return result;
}
/// <summary>
/// Returns all tokens (depth first)
/// </summary>
public static IEnumerable<TemplateToken> Traverse(this TemplateToken token)
{
return Traverse(token, omitKeys: false);
}
/// <summary>
/// Returns all tokens (depth first)
/// </summary>
public static IEnumerable<TemplateToken> Traverse(
this TemplateToken token,
bool omitKeys)
{
if (token != null)
{
yield return token;
if (token is SequenceToken || token is MappingToken)
{
var state = new TraversalState(null, token);
while (state != null)
{
if (state.MoveNext(omitKeys))
{
token = state.Current;
yield return token;
if (token is SequenceToken || token is MappingToken)
{
state = new TraversalState(state, token);
}
}
else
{
state = state.Parent;
}
}
}
}
}
private sealed class TraversalState
{
public TraversalState(
TraversalState parent,
TemplateToken token)
{
Parent = parent;
m_token = token;
}
public bool MoveNext(bool omitKeys)
{
switch (m_token.Type)
{
case TokenType.Sequence:
var sequence = m_token as SequenceToken;
if (++m_index < sequence.Count)
{
Current = sequence[m_index];
return true;
}
else
{
Current = null;
return false;
}
case TokenType.Mapping:
var mapping = m_token as MappingToken;
// Return the value
if (m_isKey)
{
m_isKey = false;
Current = mapping[m_index].Value;
return true;
}
if (++m_index < mapping.Count)
{
// Skip the key, return the value
if (omitKeys)
{
m_isKey = false;
Current = mapping[m_index].Value;
return true;
}
// Return the key
m_isKey = true;
Current = mapping[m_index].Key;
return true;
}
Current = null;
return false;
default:
throw new NotSupportedException($"Unexpected token type '{m_token.Type}'");
}
}
private TemplateToken m_token;
private int m_index = -1;
private bool m_isKey;
public TemplateToken Current;
public TraversalState Parent;
}
}
}

View File

@@ -0,0 +1,370 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Reflection;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
{
/// <summary>
/// JSON serializer for TemplateToken objects
/// </summary>
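/// <remarks>
/// Tokens without source information serialize as plain JSON values; tokens with
/// source information serialize as objects. For example (illustrative values), a
/// string token may serialize as { "type": 0, "file": 1, "line": 2, "col": 3, "lit": "hello" }.
/// </remarks>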
internal sealed class TemplateTokenJsonConverter : JsonConverter
{
public override Boolean CanWrite
{
get
{
return true;
}
}
public override Boolean CanConvert(Type objectType)
{
return typeof(TemplateToken).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo());
}
public override Object ReadJson(
JsonReader reader,
Type objectType,
Object existingValue,
JsonSerializer serializer)
{
switch (reader.TokenType)
{
case JsonToken.String:
return new StringToken(null, null, null, reader.Value.ToString());
case JsonToken.Boolean:
return new BooleanToken(null, null, null, (Boolean)reader.Value);
case JsonToken.Float:
return new NumberToken(null, null, null, (Double)reader.Value);
case JsonToken.Integer:
return new NumberToken(null, null, null, (Double)(Int64)reader.Value);
case JsonToken.Null:
return new NullToken(null, null, null);
case JsonToken.StartObject:
break;
default:
return null;
}
Int32? type = null;
JObject value = JObject.Load(reader);
if (!value.TryGetValue("type", StringComparison.OrdinalIgnoreCase, out JToken typeValue))
{
type = TokenType.String;
}
else if (typeValue.Type == JTokenType.Integer)
{
type = (Int32)typeValue;
}
else
{
return existingValue;
}
Object newValue = null;
switch (type)
{
case TokenType.Null:
newValue = new NullToken(null, null, null);
break;
case TokenType.Boolean:
newValue = new BooleanToken(null, null, null, default(Boolean));
break;
case TokenType.Number:
newValue = new NumberToken(null, null, null, default(Double));
break;
case TokenType.String:
newValue = new StringToken(null, null, null, null);
break;
case TokenType.BasicExpression:
newValue = new BasicExpressionToken(null, null, null, null);
break;
case TokenType.InsertExpression:
newValue = new InsertExpressionToken(null, null, null);
break;
case TokenType.Sequence:
newValue = new SequenceToken(null, null, null);
break;
case TokenType.Mapping:
newValue = new MappingToken(null, null, null);
break;
}
if (value != null)
{
using JsonReader objectReader = value.CreateReader();
serializer.Populate(objectReader, newValue);
}
return newValue;
}
public override void WriteJson(
JsonWriter writer,
Object value,
JsonSerializer serializer)
{
if (value is TemplateToken token)
{
switch (token.Type)
{
case TokenType.Null:
if (token.FileId == null && token.Line == null && token.Column == null)
{
writer.WriteNull();
}
else
{
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Column != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
}
writer.WriteEndObject();
}
return;
case TokenType.Boolean:
var booleanToken = token as BooleanToken;
if (token.FileId == null && token.Line == null && token.Column == null)
{
writer.WriteValue(booleanToken.Value);
}
else
{
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Column != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
}
writer.WritePropertyName("bool");
writer.WriteValue(booleanToken.Value);
writer.WriteEndObject();
}
return;
case TokenType.Number:
var numberToken = token as NumberToken;
if (token.FileId == null && token.Line == null && token.Column == null)
{
writer.WriteValue(numberToken.Value);
}
else
{
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Column != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
}
writer.WritePropertyName("num");
writer.WriteValue(numberToken.Value);
writer.WriteEndObject();
}
return;
case TokenType.String:
var stringToken = token as StringToken;
if (token.FileId == null && token.Line == null && token.Column == null)
{
writer.WriteValue(stringToken.Value);
}
else
{
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Column != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
}
writer.WritePropertyName("lit");
writer.WriteValue(stringToken.Value);
writer.WriteEndObject();
}
return;
case TokenType.BasicExpression:
var basicExpressionToken = token as BasicExpressionToken;
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Column != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
}
if (!String.IsNullOrEmpty(basicExpressionToken.Expression))
{
writer.WritePropertyName("expr");
writer.WriteValue(basicExpressionToken.Expression);
}
writer.WriteEndObject();
return;
case TokenType.InsertExpression:
var insertExpressionToken = token as InsertExpressionToken;
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Column != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
}
writer.WritePropertyName("directive");
writer.WriteValue(insertExpressionToken.Directive);
writer.WriteEndObject();
return;
case TokenType.Sequence:
var sequenceToken = token as SequenceToken;
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Column != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
}
if (sequenceToken.Count > 0)
{
writer.WritePropertyName("seq");
writer.WriteStartArray();
foreach (var item in sequenceToken)
{
serializer.Serialize(writer, item);
}
writer.WriteEndArray();
}
writer.WriteEndObject();
return;
case TokenType.Mapping:
var mappingToken = token as MappingToken;
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Column != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
}
if (mappingToken.Count > 0)
{
writer.WritePropertyName("map");
writer.WriteStartArray();
foreach (var item in mappingToken)
{
serializer.Serialize(writer, item);
}
writer.WriteEndArray();
}
writer.WriteEndObject();
return;
}
}
throw new NotSupportedException($"Unexpected type '{value?.GetType().FullName}' when serializing template token");
}
}
}

View File

@@ -0,0 +1,23 @@
using System;
namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
{
internal static class TokenType
{
internal const Int32 String = 0;
internal const Int32 Sequence = 1;
internal const Int32 Mapping = 2;
internal const Int32 BasicExpression = 3;
internal const Int32 InsertExpression = 4;
internal const Int32 Boolean = 5;
internal const Int32 Number = 6;
internal const Int32 Null = 7;
}
}

View File

@@ -0,0 +1,81 @@
namespace GitHub.Actions.WorkflowParser
{
public sealed class ParseOptions
{
public ParseOptions()
{
}
internal ParseOptions(ParseOptions copy)
{
AllowAnchors = copy.AllowAnchors;
MaxDepth = copy.MaxDepth;
MaxFiles = copy.MaxFiles;
MaxFileSize = copy.MaxFileSize;
MaxJobLimit = copy.MaxJobLimit;
MaxNestedReusableWorkflowsDepth = copy.MaxNestedReusableWorkflowsDepth;
MaxResultSize = copy.MaxResultSize;
SkipReusableWorkflows = copy.SkipReusableWorkflows;
}
/// <summary>
/// Gets or sets a value indicating whether YAML anchors are allowed.
/// </summary>
public bool AllowAnchors { get; set; }
/// <summary>
/// Gets or sets the maximum element depth when parsing a workflow.
/// </summary>
public int MaxDepth { get; set; } = 50;
/// <summary>
/// Gets the maximum error message length; longer messages are truncated.
/// </summary>
public int MaxErrorMessageLength => 500;
/// <summary>
/// Gets the maximum number of errors that can be recorded when parsing a workflow.
/// </summary>
public int MaxErrors => 10;
/// <summary>
/// Gets or sets the maximum number of files that can be loaded when parsing a workflow. Zero or less is treated as infinite.
/// </summary>
public int MaxFiles { get; set; } = 51; // 1 initial caller + max 50 reusable workflow references
/// <summary>
/// Gets or sets the maximum number of characters a file can contain when parsing a workflow.
/// </summary>
public int MaxFileSize { get; set; } = 1024 * 1024;
/// <summary>
/// Gets the maximum number of internal parsing events. This concept was initially
/// introduced to prevent infinite loops from user-controlled looping constructs. However,
/// we no longer have looping constructs.
///
/// This concept can be removed.
/// </summary>
public int MaxParseEvents => 1000000; // 1 million
/// <summary>
/// Gets or sets the maximum number of jobs that can be defined in a workflow (includes nested workflows).
/// Zero or less is treated as infinite.
/// </summary>
public int MaxJobLimit { get; set; }
/// <summary>
/// Gets or sets the maximum nesting depth for reusable workflows. Zero indicates reusable workflows are not allowed.
/// </summary>
public int MaxNestedReusableWorkflowsDepth { get; set; }
/// <summary>
/// Gets or sets the maximum size of the result in bytes.
/// </summary>
public int MaxResultSize { get; set; } = 10 * 1024 * 1024; // 10 mb
/// <summary>
/// Gets or sets a value indicating whether to skip loading reusable workflows.
/// </summary>
public bool SkipReusableWorkflows { get; set; }
}
}

View File

@@ -0,0 +1,9 @@
namespace GitHub.Actions.WorkflowParser
{
public enum PermissionLevel
{
NoAccess = 0, // Default value
Read,
Write,
}
}

View File

@@ -0,0 +1,220 @@
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.Actions.WorkflowParser.Conversion;
using Newtonsoft.Json;
namespace GitHub.Actions.WorkflowParser
{
[DataContract]
public class Permissions
{
[JsonConstructor]
public Permissions()
{
}
public Permissions(Permissions copy)
{
Actions = copy.Actions;
ArtifactMetadata = copy.ArtifactMetadata;
Attestations = copy.Attestations;
Checks = copy.Checks;
Contents = copy.Contents;
Deployments = copy.Deployments;
Issues = copy.Issues;
Discussions = copy.Discussions;
Packages = copy.Packages;
Pages = copy.Pages;
PullRequests = copy.PullRequests;
RepositoryProjects = copy.RepositoryProjects;
Statuses = copy.Statuses;
SecurityEvents = copy.SecurityEvents;
IdToken = copy.IdToken;
Models = copy.Models;
}
public Permissions(
PermissionLevel permissionLevel,
bool includeIdToken,
bool includeAttestations,
bool includeModels)
{
Actions = permissionLevel;
ArtifactMetadata = permissionLevel;
Attestations = includeAttestations ? permissionLevel : PermissionLevel.NoAccess;
Checks = permissionLevel;
Contents = permissionLevel;
Deployments = permissionLevel;
Issues = permissionLevel;
Discussions = permissionLevel;
Packages = permissionLevel;
Pages = permissionLevel;
PullRequests = permissionLevel;
RepositoryProjects = permissionLevel;
Statuses = permissionLevel;
SecurityEvents = permissionLevel;
IdToken = includeIdToken ? permissionLevel : PermissionLevel.NoAccess;
// Models must not have higher permissions than Read
Models = includeModels
? (permissionLevel == PermissionLevel.Write ? PermissionLevel.Read : permissionLevel)
: PermissionLevel.NoAccess;
}
private static KeyValuePair<string, (PermissionLevel, PermissionLevel)>[] ComparisonKeyMapping(Permissions left, Permissions right)
{
return new[]
{
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("actions", (left.Actions, right.Actions)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("artifact-metadata", (left.ArtifactMetadata, right.ArtifactMetadata)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("attestations", (left.Attestations, right.Attestations)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("checks", (left.Checks, right.Checks)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("contents", (left.Contents, right.Contents)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("deployments", (left.Deployments, right.Deployments)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("discussions", (left.Discussions, right.Discussions)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("issues", (left.Issues, right.Issues)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("packages", (left.Packages, right.Packages)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("pages", (left.Pages, right.Pages)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("pull-requests", (left.PullRequests, right.PullRequests)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("repository-projects", (left.RepositoryProjects, right.RepositoryProjects)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("statuses", (left.Statuses, right.Statuses)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("security-events", (left.SecurityEvents, right.SecurityEvents)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("id-token", (left.IdToken, right.IdToken)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("models", (left.Models, right.Models)),
};
}
[DataMember(Name = "actions", EmitDefaultValue = false)]
public PermissionLevel Actions
{
get;
set;
}
[DataMember(Name = "artifact-metadata", EmitDefaultValue = false)]
public PermissionLevel ArtifactMetadata
{
get;
set;
}
[DataMember(Name = "attestations", EmitDefaultValue = false)]
public PermissionLevel Attestations
{
get;
set;
}
[DataMember(Name = "checks", EmitDefaultValue = false)]
public PermissionLevel Checks
{
get;
set;
}
[DataMember(Name = "contents", EmitDefaultValue = false)]
public PermissionLevel Contents
{
get;
set;
}
[DataMember(Name = "deployments", EmitDefaultValue = false)]
public PermissionLevel Deployments
{
get;
set;
}
[DataMember(Name = "discussions", EmitDefaultValue = false)]
public PermissionLevel Discussions
{
get;
set;
}
[DataMember(Name = "id-token", EmitDefaultValue = false)]
public PermissionLevel IdToken
{
get;
set;
}
[DataMember(Name = "issues", EmitDefaultValue = false)]
public PermissionLevel Issues
{
get;
set;
}
[DataMember(Name = "models", EmitDefaultValue = false)]
public PermissionLevel Models
{
get;
set;
}
[DataMember(Name = "packages", EmitDefaultValue = false)]
public PermissionLevel Packages
{
get;
set;
}
[DataMember(Name = "pages", EmitDefaultValue = false)]
public PermissionLevel Pages
{
get;
set;
}
[DataMember(Name = "pull-requests", EmitDefaultValue = false)]
public PermissionLevel PullRequests
{
get;
set;
}
[DataMember(Name = "repository-projects", EmitDefaultValue = false)]
public PermissionLevel RepositoryProjects
{
get;
set;
}
[DataMember(Name = "security-events", EmitDefaultValue = false)]
public PermissionLevel SecurityEvents
{
get;
set;
}
[DataMember(Name = "statuses", EmitDefaultValue = false)]
public PermissionLevel Statuses
{
get;
set;
}
public Permissions Clone()
{
return new Permissions(this);
}
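/// <summary>
/// Compares each permission against the corresponding value in <paramref name="maxPermissions"/>.
/// Returns true when any permission exceeds its maximum; the individual violations are returned
/// via <paramref name="permissionsViolations"/>.
/// </summary>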
internal bool ViolatesMaxPermissions(Permissions maxPermissions, out List<PermissionLevelViolation> permissionsViolations)
{
var mapping = Permissions.ComparisonKeyMapping(this, maxPermissions);
permissionsViolations = new List<PermissionLevelViolation>();
foreach (var (key, (permissionLevel, maxPermissionLevel)) in mapping)
{
if (!permissionLevel.IsLessThanOrEqualTo(maxPermissionLevel))
{
permissionsViolations.Add(new PermissionLevelViolation(key, permissionLevel, maxPermissionLevel));
}
}
return permissionsViolations.Count > 0;
}
}
}

View File

@@ -0,0 +1,167 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using System.Text.Json.Serialization;
namespace GitHub.Actions.WorkflowParser
{
[DataContract]
public sealed class ReferencedWorkflow
{
[JsonConstructor]
public ReferencedWorkflow()
{
}
private ReferencedWorkflow(ReferencedWorkflow infoToClone)
{
this.CallingWorkflowRef = infoToClone.CallingWorkflowRef;
this.CallingWorkflowSha = infoToClone.CallingWorkflowSha;
this.Repository = infoToClone.Repository;
this.RepositoryId = infoToClone.RepositoryId;
this.TenantId = infoToClone.TenantId;
this.ResolvedRef = infoToClone.ResolvedRef;
this.ResolvedSha = infoToClone.ResolvedSha;
this.WorkflowRef = infoToClone.WorkflowRef;
this.WorkflowFileFullPath = infoToClone.WorkflowFileFullPath;
this.m_data = new Dictionary<string, string>(infoToClone.Data, StringComparer.OrdinalIgnoreCase);
}
/// <summary>
/// Gets or sets the repository's name with owner (NWO), e.g. owner/repo
/// </summary>
[DataMember]
public string Repository { get; set; }
/// <summary>
/// Gets or sets the repository's GitHub global relay id
/// </summary>
[DataMember]
public string RepositoryId { get; set; }
/// <summary>
/// Gets or sets the branch/tag ref that was resolved to the calling workflow file
/// refs/tags/ or refs/heads/
/// This could be empty if the calling workflow file was referenced directly via commit SHA, or if there is no calling workflow
/// </summary>
[DataMember]
public string CallingWorkflowRef { get; set; }
/// <summary>
/// Gets or sets the commit SHA for the calling workflow file
/// This is empty if there is no calling workflow
/// </summary>
[DataMember]
public string CallingWorkflowSha { get; set; }
/// <summary>
/// Gets or sets the repository's Actions tenant HostId
/// </summary>
[DataMember]
public Guid TenantId { get; set; }
/// <summary>
/// Gets or sets the branch/tag ref that was resolved to the workflow file
/// refs/tags/ or refs/heads/
/// This could be empty if the workflow file was referenced directly via commit SHA
/// </summary>
[DataMember]
public string ResolvedRef { get; set; }
/// <summary>
/// Gets or sets the commit SHA for the workflow file
/// </summary>
[DataMember]
public string ResolvedSha { get; set; }
/// <summary>
/// Gets or sets the full path to the workflow file
/// owner/repo/path/to/workflow.yml
/// </summary>
[DataMember]
public string WorkflowFileFullPath { get; set; }
/// <summary>
/// Gets or sets the workflow ref.
/// For a callable workflow:
/// owner/repo/path/to/workflow.yml@ref
/// For the main workflow file:
/// path/to/workflow.yml
/// </summary>
[DataMember]
public string WorkflowRef { get; set; }
[IgnoreDataMember]
public string CanonicalWorkflowRef
{
get
{
// When ResolvedRef is not empty, the workflow ref was like "uses: my-org/my-repo/.github/workflows/foo.yml@main".
// Otherwise the workflow ref was like "uses: my-org/my-repo/.github/workflows/foo.yml@664bf207624be1e27b36b04c058d01893570f45c"
return string.Concat(
this.WorkflowFileFullPath,
"@",
!string.IsNullOrEmpty(this.ResolvedRef) ? this.ResolvedRef : this.ResolvedSha);
}
}
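/// <summary>
/// Gets a case-insensitive property bag of additional data about the referenced workflow.
/// Well-known keys include "IsTrusted", "IsRequiredWorkflow", and "PlanOwnerId" (see the helper methods below).
/// </summary>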
public Dictionary<string, string> Data
{
get
{
if (m_data == null)
{
m_data = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
}
return m_data;
}
}
public ReferencedWorkflow Clone()
{
return new ReferencedWorkflow(this);
}
public bool IsTrusted()
{
if (Data.TryGetValue("IsTrusted", out var isTrusted))
{
return string.Equals(isTrusted, bool.TrueString, StringComparison.OrdinalIgnoreCase);
}
return false;
}
public bool IsRequiredWorkflow()
{
if (Data.TryGetValue("IsRequiredWorkflow", out var isRequiredWorkflow))
{
return string.Equals(isRequiredWorkflow, bool.TrueString, StringComparison.OrdinalIgnoreCase);
}
return false;
}
public string GetPlanOwnerId()
{
if (Data.TryGetValue("PlanOwnerId", out var planOwnerId))
{
return planOwnerId;
}
return null;
}
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_data?.Count == 0)
{
m_data = null;
}
}
[DataMember(Name = "Data", EmitDefaultValue = false)]
private Dictionary<string, string> m_data;
}
}

View File

@@ -0,0 +1,12 @@
using System;
namespace GitHub.Actions.WorkflowParser
{
public class ReferencedWorkflowNotFoundException : Exception
{
public ReferencedWorkflowNotFoundException(String message)
: base(message)
{
}
}
}

View File

@@ -0,0 +1,192 @@
#nullable enable
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Serialization;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser
{
[DataContract]
public sealed class ReusableWorkflowJob : IJob
{
[DataMember(Order = 0, Name = "type", EmitDefaultValue = false)]
public JobType Type
{
get
{
return JobType.ReusableWorkflowJob;
}
}
[DataMember(Order = 1, Name = "id", EmitDefaultValue = false)]
public StringToken? Id
{
get;
set;
}
[DataMember(Order = 2, Name = "name", EmitDefaultValue = false)]
public ScalarToken? Name
{
get;
set;
}
[IgnoreDataMember]
public IList<StringToken> Needs
{
get
{
if (m_needs == null)
{
m_needs = new List<StringToken>();
}
return m_needs;
}
}
[DataMember(Order = 4, Name = "if", EmitDefaultValue = false)]
public BasicExpressionToken? If
{
get;
set;
}
[DataMember(Order = 5, Name = "ref", EmitDefaultValue = false)]
public StringToken? Ref
{
get;
set;
}
[DataMember(Order = 6, Name = "permissions", EmitDefaultValue = false)]
public Permissions? Permissions
{
get;
set;
}
[DataMember(Order = 7, Name = "input-definitions", EmitDefaultValue = false)]
public MappingToken? InputDefinitions
{
get;
set;
}
[DataMember(Order = 8, Name = "input-values", EmitDefaultValue = false)]
public MappingToken? InputValues
{
get;
set;
}
[DataMember(Order = 9, Name = "secret-definitions", EmitDefaultValue = false)]
public MappingToken? SecretDefinitions
{
get;
set;
}
[DataMember(Order = 10, Name = "secret-values", EmitDefaultValue = false)]
public MappingToken? SecretValues
{
get;
set;
}
[DataMember(Order = 11, Name = "inherit-secrets", EmitDefaultValue = false)]
public bool InheritSecrets
{
get;
set;
}
[DataMember(Order = 12, Name = "outputs", EmitDefaultValue = false)]
public MappingToken? Outputs
{
get;
set;
}
[DataMember(Order = 13, Name = "defaults", EmitDefaultValue = false)]
public TemplateToken? Defaults
{
get;
set;
}
[DataMember(Order = 14, Name = "env", EmitDefaultValue = false)]
public TemplateToken? Env
{
get;
set;
}
[DataMember(Order = 15, Name = "concurrency", EmitDefaultValue = false)]
public TemplateToken? Concurrency
{
get;
set;
}
[DataMember(Order = 16, Name = "embedded-concurrency", EmitDefaultValue = false)]
public TemplateToken? EmbeddedConcurrency
{
get;
set;
}
[DataMember(Order = 17, Name = "strategy", EmitDefaultValue = false)]
public TemplateToken? Strategy
{
get;
set;
}
[IgnoreDataMember]
public IList<IJob> Jobs
{
get
{
if (m_jobs == null)
{
m_jobs = new List<IJob>();
}
return m_jobs;
}
}
public IJob Clone(bool omitSource)
{
var result = new ReusableWorkflowJob
{
Concurrency = Concurrency?.Clone(omitSource),
Defaults = Defaults?.Clone(omitSource),
Name = Name?.Clone(omitSource) as ScalarToken,
EmbeddedConcurrency = EmbeddedConcurrency?.Clone(omitSource),
Env = Env?.Clone(omitSource),
Id = Id?.Clone(omitSource) as StringToken,
If = If?.Clone(omitSource) as BasicExpressionToken,
InheritSecrets = InheritSecrets,
InputDefinitions = InputDefinitions?.Clone(omitSource) as MappingToken,
InputValues = InputValues?.Clone(omitSource) as MappingToken,
Outputs = Outputs?.Clone(omitSource) as MappingToken,
Permissions = Permissions?.Clone(),
Ref = Ref?.Clone(omitSource) as StringToken,
SecretDefinitions = SecretDefinitions?.Clone(omitSource) as MappingToken,
SecretValues = SecretValues?.Clone(omitSource) as MappingToken,
Strategy = Strategy?.Clone(omitSource),
};
result.Jobs.AddRange(Jobs.Select(x => x.Clone(omitSource)));
result.Needs.AddRange(Needs.Select(x => (x.Clone(omitSource) as StringToken)!));
return result;
}
[DataMember(Order = 3, Name = "needs", EmitDefaultValue = false)]
private List<StringToken>? m_needs;
[DataMember(Order = 18, Name = "jobs", EmitDefaultValue = false)]
private List<IJob>? m_jobs;
}
}

View File

@@ -0,0 +1,93 @@
#nullable enable
using System.Runtime.Serialization;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser
{
[DataContract]
public sealed class RunStep : IStep
{
[DataMember(Order = 0, Name = "id", EmitDefaultValue = false)]
public string? Id
{
get;
set;
}
/// <summary>
/// Gets or sets the display name
/// </summary>
[DataMember(Order = 1, Name = "name", EmitDefaultValue = false)]
public ScalarToken? Name
{
get;
set;
}
[DataMember(Order = 2, Name = "if", EmitDefaultValue = false)]
public BasicExpressionToken? If
{
get;
set;
}
[DataMember(Order = 3, Name = "continue-on-error", EmitDefaultValue = false)]
public ScalarToken? ContinueOnError
{
get;
set;
}
[DataMember(Order = 4, Name = "timeout-minutes", EmitDefaultValue = false)]
public ScalarToken? TimeoutMinutes
{
get;
set;
}
[DataMember(Order = 5, Name = "env", EmitDefaultValue = false)]
public TemplateToken? Env
{
get;
set;
}
[DataMember(Order = 6, Name = "working-directory", EmitDefaultValue = false)]
public ScalarToken? WorkingDirectory
{
get;
set;
}
[DataMember(Order = 7, Name = "shell", EmitDefaultValue = false)]
public ScalarToken? Shell
{
get;
set;
}
[DataMember(Order = 8, Name = "run", EmitDefaultValue = false)]
public ScalarToken? Run
{
get;
set;
}
public IStep Clone(bool omitSource)
{
return new RunStep
{
ContinueOnError = ContinueOnError?.Clone(omitSource) as ScalarToken,
Env = Env?.Clone(omitSource),
Id = Id,
If = If?.Clone(omitSource) as BasicExpressionToken,
Name = Name?.Clone(omitSource) as ScalarToken,
Run = Run?.Clone(omitSource) as ScalarToken,
Shell = Shell?.Clone(omitSource) as ScalarToken,
TimeoutMinutes = TimeoutMinutes?.Clone(omitSource) as ScalarToken,
WorkingDirectory = WorkingDirectory?.Clone(omitSource) as ScalarToken,
};
}
}
}

View File

@@ -0,0 +1,39 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
namespace GitHub.Actions.WorkflowParser
{
[DataContract]
public class RunsOn
{
public HashSet<string> Labels
{
get
{
if (m_labels == null)
{
m_labels = new HashSet<string>();
}
return m_labels;
}
}
[DataMember(EmitDefaultValue = false)]
public String RunnerGroup { get; set; }
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_labels?.Count == 0)
{
m_labels = null;
}
}
[DataMember(Name = "Labels", EmitDefaultValue = false)]
private HashSet<string> m_labels;
}
}

View File

@@ -0,0 +1,24 @@
#nullable enable
using System.Runtime.Serialization;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser
{
[DataContract]
public class Snapshot
{
[DataMember(EmitDefaultValue = false)]
public required string ImageName { get; set; }
[DataMember(EmitDefaultValue = false)]
public BasicExpressionToken? If
{
get;
set;
}
[DataMember(EmitDefaultValue = false)]
public required string Version { get; set; }
}
}

View File

@@ -0,0 +1,7 @@
namespace GitHub.Actions.WorkflowParser;
public enum StepType
{
ActionStep,
RunStep,
}

View File

@@ -0,0 +1,39 @@
#nullable enable
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
namespace GitHub.Actions.WorkflowParser
{
[DataContract]
public sealed class Strategy
{
public Strategy()
{
FailFast = true;
}
[DataMember(Name = "failFast", EmitDefaultValue = true)]
public Boolean FailFast { get; set; }
[DataMember(Name = "maxParallel", EmitDefaultValue = false)]
public int MaxParallel { get; set; }
[IgnoreDataMember]
public List<StrategyConfiguration> Configurations
{
get
{
if (m_configuration is null)
{
m_configuration = new List<StrategyConfiguration>();
}
return m_configuration;
}
}
[DataMember(Name = "configuration", EmitDefaultValue = false)]
private List<StrategyConfiguration>? m_configuration;
}
}

View File

@@ -0,0 +1,38 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions.Data;
namespace GitHub.Actions.WorkflowParser
{
[DataContract]
public sealed class StrategyConfiguration
{
/// <summary>
/// Gets or sets the display name
/// </summary>
[DataMember(Name = "name", EmitDefaultValue = false)]
public String Name { get; set; }
[DataMember(Name = "id", EmitDefaultValue = false)]
public String Id { get; set; }
[IgnoreDataMember]
public Dictionary<String, ExpressionData> ExpressionData
{
get
{
if (m_expressionData is null)
{
m_expressionData = new Dictionary<String, ExpressionData>(StringComparer.Ordinal);
}
return m_expressionData;
}
}
[DataMember(Name = "expressionData", EmitDefaultValue = false)]
private Dictionary<String, ExpressionData> m_expressionData;
}
}

View File

@@ -0,0 +1,70 @@
using System;
using GitHub.Actions.WorkflowParser.Conversion;
using GitHub.Actions.WorkflowParser.ObjectTemplating;
namespace GitHub.Actions.WorkflowParser
{
/// <summary>
/// Extension methods for <see cref="TemplateContext"/>
/// </summary>
internal static class TemplateContextExtensions
{
/// <summary>
/// Stores the <see cref="WorkflowFeatures"/> in the <see cref="TemplateContext"/> state.
/// </summary>
public static void SetFeatures(
this TemplateContext context,
WorkflowFeatures features)
{
context.State[s_featuresKey] = features;
}
/// <summary>
/// Gets the <see cref="WorkflowFeatures"/> from the <see cref="TemplateContext"/> state.
/// </summary>
public static WorkflowFeatures GetFeatures(this TemplateContext context)
{
if (context.State.TryGetValue(s_featuresKey, out var value) &&
value is WorkflowFeatures features)
{
return features;
}
throw new ArgumentNullException(nameof(WorkflowFeatures));
}
/// <summary>
/// Stores the <see cref="JobCountValidator"/> in the <see cref="TemplateContext"/> state.
/// </summary>
public static void SetJobCountValidator(
this TemplateContext context,
JobCountValidator validator)
{
context.State[s_jobCountValidatorKey] = validator;
}
/// <summary>
/// Gets the <see cref="JobCountValidator"/> from the <see cref="TemplateContext"/> state.
/// </summary>
public static JobCountValidator GetJobCountValidator(this TemplateContext context)
{
if (context.State.TryGetValue(s_jobCountValidatorKey, out var value) &&
value is JobCountValidator validator)
{
return validator;
}
throw new ArgumentNullException(nameof(JobCountValidator));
}
/// <summary>
/// Lookup key for the <see cref="WorkflowFeatures"/> object within the state dictionary.
/// </summary>
private static readonly string s_featuresKey = typeof(WorkflowFeatures).FullName!;
/// <summary>
/// Lookup key for the <see cref="JobCountValidator"/> object within the state dictionary.
/// </summary>
private static readonly string s_jobCountValidatorKey = typeof(JobCountValidator).FullName!;
}
}

View File

@@ -0,0 +1,39 @@
using System;
namespace GitHub.Actions.WorkflowParser
{
public static class WorkflowConstants
{
/// <summary>
/// The default job cancel timeout in minutes.
/// </summary>
internal const Int32 DefaultJobCancelTimeoutInMinutes = 5;
/// <summary>
/// The default job name. This job name is used when a job does not leverage multipliers
/// or slicing and only has one implicit job.
/// </summary>
internal const String DefaultJobName = "__default";
/// <summary>
/// The default job timeout in minutes.
/// </summary>
internal const Int32 DefaultJobTimeoutInMinutes = 360;
/// <summary>
/// The max length for a node within a workflow - e.g. a job ID or a matrix configuration ID.
/// </summary>
internal const Int32 MaxNodeNameLength = 100;
/// <summary>
/// Alias for the self repository.
/// </summary>
internal const String SelfAlias = "self";
public static class PermissionsPolicy
{
public const string LimitedRead = "LimitedRead";
public const string Write = "Write";
}
}
}

View File

@@ -0,0 +1,95 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Runtime.Serialization;
namespace GitHub.Actions.WorkflowParser
{
/// <summary>
/// Features flags (mostly short-lived)
/// </summary>
[DataContract]
public class WorkflowFeatures
{
/// <summary>
/// Gets or sets a value indicating whether users may specify permission "id-token".
/// Used during parsing only.
/// </summary>
[DataMember(EmitDefaultValue = false)]
public bool IdToken { get; set; } // Remove with DistributedTask.AllowGenerateIdToken
/// <summary>
/// Gets or sets a value indicating whether users may specify permission "short-matrix-ids".
/// Used during parsing and evaluation.
/// </summary>
[DataMember(EmitDefaultValue = false)]
public bool ShortMatrixIds { get; set; } // Remove with DistributedTask.GenerateShortMatrixIds
/// <summary>
/// Gets or sets a value indicating whether users may use the "snapshot" keyword.
/// Used during parsing only.
/// More information: https://github.com/github/hosted-runners/issues/186
/// </summary>
[DataMember(EmitDefaultValue = false)]
public bool Snapshot { get; set; }
/// <summary>
/// Gets or sets a value indicating whether users may use the "models" permission.
/// Used during parsing only.
/// </summary>
[DataMember(EmitDefaultValue = false)]
public bool AllowModelsPermission { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the expression function fromJson performs strict JSON parsing.
/// Used during evaluation only.
/// </summary>
[DataMember(EmitDefaultValue = false)]
public bool StrictJsonParsing { get; set; }
/// <summary>
/// Gets the default workflow features.
/// </summary>
public static WorkflowFeatures GetDefaults()
{
return new WorkflowFeatures
{
IdToken = true, // Default to true since this is a long-lived feature flag
ShortMatrixIds = true, // Default to true since this is a long-lived feature flag
Snapshot = false, // Default to false since this feature is still in an experimental phase
StrictJsonParsing = false, // Default to false since this is temporary for telemetry purposes only
AllowModelsPermission = false, // Default to false since we want this to be disabled for all non-production environments
};
}
/// <summary>
/// Gets the value of the feature flag
/// </summary>
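/// <example>
/// Illustrative lookup by property name (a sketch; feature flags are keyed by their property names):
/// <code>
/// var features = WorkflowFeatures.GetDefaults();
/// bool snapshotEnabled = features.GetFeature(nameof(WorkflowFeatures.Snapshot));
/// </code>
/// </example>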
public bool GetFeature(string name)
{
return (bool)s_properties[name].GetValue(this)!;
}
/// <summary>
/// Sets the value of the feature flag
/// </summary>
public void SetFeature(string name, bool value)
{
s_properties[name].SetValue(this, value);
}
/// <summary>
/// Reflection info for accessing the feature flags
/// </summary>
private static readonly Dictionary<string, PropertyInfo> s_properties =
typeof(WorkflowFeatures).GetProperties(BindingFlags.Public | BindingFlags.Instance)
.Where(x => x.PropertyType == typeof(bool)) // Boolean properties only
.ToDictionary(x => x.Name, StringComparer.Ordinal);
/// <summary>
/// Names of all feature flags
/// </summary>
public static readonly IReadOnlyList<string> Names = s_properties.Keys.Order().ToList().AsReadOnly();
}
}

View File

@@ -0,0 +1,172 @@
// <auto-generated/>
// *** AUTOMATICALLY GENERATED BY GenResourceClass -- DO NOT EDIT!!! ***
using System;
using System.Diagnostics;
using System.ComponentModel;
using System.Globalization;
using System.Reflection;
using System.Resources;
namespace GitHub.Actions.WorkflowParser {
internal static class WorkflowStrings
{
//********************************************************************************************
/// Creates the resource manager instance.
//********************************************************************************************
static WorkflowStrings()
{
s_resMgr = new ResourceManager("GitHub.Actions.WorkflowParser.WorkflowStrings", typeof(WorkflowStrings).GetTypeInfo().Assembly);
}
public static ResourceManager Manager
{
get
{
return s_resMgr;
}
}
//********************************************************************************************
/// Returns a localized string given a resource string name.
//********************************************************************************************
public static String Get(
String resourceName)
{
return s_resMgr.GetString(resourceName, CultureInfo.CurrentUICulture);
}
//********************************************************************************************
/// Returns a localized integer given a resource string name.
//********************************************************************************************
public static int GetInt(
String resourceName)
{
return (int)s_resMgr.GetObject(resourceName, CultureInfo.CurrentUICulture);
}
//********************************************************************************************
/// Returns a localized string given a resource string name.
//********************************************************************************************
public static bool GetBool(
String resourceName)
{
return (bool)s_resMgr.GetObject(resourceName, CultureInfo.CurrentUICulture);
}
//********************************************************************************************
/// A little helper function to alleviate some typing associated with loading resources and
/// formatting the strings. In DEBUG builds, it also asserts that the number of format
/// arguments and the length of args match.
//********************************************************************************************
private static String Format( // The formatted resource string.
String resourceName, // The name of the resource.
params Object[] args) // Arguments to format.
{
String resource = Get(resourceName);
#if DEBUG
// Check to make sure that the number of format string arguments matches the number of
// arguments passed in.
int formatArgCount = 0;
bool[] argSeen = new bool[100];
for (int i = 0; i < resource.Length; i++)
{
if (resource[i] == '{')
{
if (i + 1 < resource.Length &&
resource[i + 1] == '{')
{
i++; // Skip the escaped curly braces.
}
else
{
// Move past the curly brace and leading whitespace.
i++;
while (Char.IsWhiteSpace(resource[i]))
{
i++;
}
// Get the argument number.
int length = 0;
while (i + length < resource.Length && Char.IsDigit(resource[i + length]))
{
length++;
}
// Record it if it hasn't already been seen.
int argNumber = int.Parse(resource.Substring(i, length), CultureInfo.InvariantCulture);
if (!argSeen[argNumber])
{
formatArgCount++; // Count it as a formatting argument.
argSeen[argNumber] = true;
}
}
}
}
Debug.Assert(args != null || formatArgCount == 0,
String.Format(CultureInfo.InvariantCulture, "The number of format arguments is {0}, but the args parameter is null.", formatArgCount));
Debug.Assert(args == null || formatArgCount == args.Length,
String.Format(CultureInfo.InvariantCulture, "Coding error using resource \"{0}\": The number of format arguments {1} != number of args {2}",
resourceName, formatArgCount, args != null ? args.Length : 0));
#endif // DEBUG
if (args == null)
{
return resource;
}
// If there are any DateTime structs in the arguments, we need to bracket them
// to make sure they are within the supported range of the current calendar.
for (int i = 0; i < args.Length; i++)
{
// DateTime is a struct, we cannot use the as operator and null check.
if (args[i] is DateTime)
{
DateTime dateTime = (DateTime)args[i];
// We need to fetch the calendar on each Format call since it may change.
// Since we don't have more than one DateTime for resource, do not
// bother to cache this for the duration of the for loop.
Calendar calendar = DateTimeFormatInfo.CurrentInfo.Calendar;
if (dateTime > calendar.MaxSupportedDateTime)
{
args[i] = calendar.MaxSupportedDateTime;
}
else if (dateTime < calendar.MinSupportedDateTime)
{
args[i] = calendar.MinSupportedDateTime;
}
}
}
return String.Format(CultureInfo.CurrentCulture, resource, args);
}
// According to the documentation for the ResourceManager class, it should be sufficient to
// create a single static instance. The following is an excerpt from the 1.1 documentation.
// Using the methods of ResourceManager, a caller can access the resources for a particular
// culture using the GetObject and GetString methods. By default, these methods return the
// resource for the culture determined by the current cultural settings of the thread that made
// the call.
private static ResourceManager s_resMgr;
/// <summary>
/// The workflow is not valid.
/// </summary>
public static String WorkflowNotValid() { return Get("WorkflowNotValid"); }
/// <summary>
/// The workflow is not valid. {0}
/// </summary>
public static String WorkflowNotValidWithErrors(object arg0) { return Format("WorkflowNotValidWithErrors", arg0); }
}
} // namespace

View File

@@ -0,0 +1,126 @@
<?xml version="1.0" encoding="utf-8"?>
<root>
<!--
Microsoft ResX Schema
Version 2.0
The primary goals of this format is to allow a simple XML format
that is mostly human readable. The generation and parsing of the
various data types are done through the TypeConverter classes
associated with the data types.
Example:
... ado.net/XML headers & schema ...
<resheader name="resmimetype">text/microsoft-resx</resheader>
<resheader name="version">2.0</resheader>
<resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
<resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
<data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
<data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
<data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
<value>[base64 mime encoded serialized .NET Framework object]</value>
</data>
<data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
<value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
<comment>This is a comment</comment>
</data>
There are any number of "resheader" rows that contain simple
name/value pairs.
Each data row contains a name, and value. The row also contains a
type or mimetype. Type corresponds to a .NET class that support
text/value conversion through the TypeConverter architecture.
Classes that don't support this are serialized and stored with the
mimetype set.
The mimetype is used for serialized objects, and tells the
ResXResourceReader how to depersist the object. This is currently not
extensible. For a given mimetype the value must be set accordingly:
Note - application/x-microsoft.net.object.binary.base64 is the format
that the ResXResourceWriter will generate, however the reader can
read any of the formats listed below.
mimetype: application/x-microsoft.net.object.binary.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.soap.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Soap.SoapFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.bytearray.base64
value : The object must be serialized into a byte array
: using a System.ComponentModel.TypeConverter
: and then encoded with base64 encoding.
-->
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
<xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
<xsd:element name="root" msdata:IsDataSet="true">
<xsd:complexType>
<xsd:choice maxOccurs="unbounded">
<xsd:element name="metadata">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" />
</xsd:sequence>
<xsd:attribute name="name" use="required" type="xsd:string" />
<xsd:attribute name="type" type="xsd:string" />
<xsd:attribute name="mimetype" type="xsd:string" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="assembly">
<xsd:complexType>
<xsd:attribute name="alias" type="xsd:string" />
<xsd:attribute name="name" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="data">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="resheader">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" />
</xsd:complexType>
</xsd:element>
</xsd:choice>
</xsd:complexType>
</xsd:element>
</xsd:schema>
<resheader name="resmimetype">
<value>text/microsoft-resx</value>
</resheader>
<resheader name="version">
<value>2.0</value>
</resheader>
<resheader name="reader">
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<resheader name="writer">
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<data name="WorkflowNotValid" xml:space="preserve">
<value>The workflow is not valid.</value>
</data>
<data name="WorkflowNotValidWithErrors" xml:space="preserve">
<value>The workflow is not valid. {0}</value>
</data>
</root>

View File

@@ -0,0 +1,188 @@
#nullable enable
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Runtime.Serialization;
using GitHub.Actions.WorkflowParser.ObjectTemplating;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser
{
[DataContract]
public class WorkflowTemplate
{
public IDictionary<String, String> InputTypes
{
get
{
if (m_inputTypes == null)
{
m_inputTypes = new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase);
}
return m_inputTypes;
}
}
[DataMember(Order = 0, Name = "input-types", EmitDefaultValue = false)]
private Dictionary<String, String>? m_inputTypes;
[DataMember(Order = 1, Name = "env", EmitDefaultValue = false)]
public TemplateToken? Env
{
get;
set;
}
[DataMember(Order = 2, Name = "permissions", EmitDefaultValue = false)]
public Permissions? Permissions
{
get;
set;
}
[DataMember(Order = 3, Name = "defaults", EmitDefaultValue = false)]
public TemplateToken? Defaults
{
get;
set;
}
[DataMember(Order = 4, Name = "concurrency", EmitDefaultValue = false)]
public TemplateToken? Concurrency
{
get;
set;
}
public IList<IJob> Jobs
{
get
{
if (m_jobs == null)
{
m_jobs = new List<IJob>();
}
return m_jobs;
}
}
[DataMember(Order = 5, Name = "jobs", EmitDefaultValue = false)]
private List<IJob>? m_jobs;
public List<String> FileTable
{
get
{
if (m_fileTable == null)
{
m_fileTable = new List<String>();
}
return m_fileTable;
}
}
[DataMember(Order = 6, Name = "file-table", EmitDefaultValue = false)]
private List<String>? m_fileTable;
public IList<WorkflowValidationError> Errors
{
get
{
if (m_errors == null)
{
m_errors = new List<WorkflowValidationError>();
}
return m_errors;
}
}
[DataMember(Order = 7, Name = "errors", EmitDefaultValue = false)]
private List<WorkflowValidationError>? m_errors;
[EditorBrowsable(EditorBrowsableState.Never)]
public List<FileInfo> FileInfo
{
get
{
if (m_fileInfo == null)
{
m_fileInfo = new List<FileInfo>();
}
return m_fileInfo;
}
}
[DataMember(Order = 8, Name = "file-info", EmitDefaultValue = false)]
private List<FileInfo>? m_fileInfo;
[IgnoreDataMember]
public String? InitializationLog
{
get;
set;
}
[IgnoreDataMember]
public Telemetry? Telemetry
{
get;
set;
}
public void CheckErrors()
{
if (m_errors?.Count > 0)
{
throw new WorkflowValidationException(m_errors);
}
}
internal WorkflowTemplate Clone(bool omitSource)
{
var result = new WorkflowTemplate
{
Concurrency = Concurrency?.Clone(omitSource),
Defaults = Defaults?.Clone(omitSource),
Env = Env?.Clone(omitSource),
Permissions = Permissions?.Clone(),
};
result.Errors.AddRange(Errors.Select(x => x.Clone()));
result.InitializationLog = InitializationLog;
result.InputTypes.AddRange(InputTypes);
result.Jobs.AddRange(Jobs.Select(x => x.Clone(omitSource)));
if (!omitSource)
{
result.FileTable.AddRange(FileTable);
result.FileInfo.AddRange(FileInfo.Select(x => x.Clone()));
}
return result;
}
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_inputTypes?.Count == 0)
{
m_inputTypes = null;
}
if (m_jobs?.Count == 0)
{
m_jobs = null;
}
if (m_errors?.Count == 0)
{
m_errors = null;
}
if (m_fileTable?.Count == 0)
{
m_fileTable = null;
}
if (m_fileInfo?.Count == 0)
{
m_fileInfo = null;
}
}
}
}
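
A brief consumption sketch (not part of this commit; "template" is assumed to come from WorkflowTemplateParser.LoadWorkflow, shown later in this diff):

// "template" is a WorkflowTemplate; where it comes from is an assumption for this sketch.
try
{
    template.CheckErrors();   // throws WorkflowValidationException when any errors were recorded
}
catch (WorkflowValidationException ex)
{
    Console.Error.WriteLine(ex.Message);
}

// Jobs, Errors and FileTable are created lazily, so they are safe to enumerate even when
// the backing fields were left null during deserialization.
Console.WriteLine($"jobs: {template.Jobs.Count}, errors: {template.Errors.Count}");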

View File

@@ -0,0 +1,986 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Threading;
using GitHub.Actions.Expressions;
using GitHub.Actions.Expressions.Data;
using GitHub.Actions.Expressions.Sdk.Functions;
using GitHub.Actions.WorkflowParser.Conversion;
using GitHub.Actions.WorkflowParser.ObjectTemplating;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Schema;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
using ITraceWriter = GitHub.Actions.WorkflowParser.ObjectTemplating.ITraceWriter;
namespace GitHub.Actions.WorkflowParser
{
/// <summary>
/// Evaluates parts of the workflow DOM. For example, a job strategy or step inputs.
/// </summary>
public class WorkflowTemplateEvaluator
{
/// <summary>
/// Creates a new instance for evaluating tokens within a workflow template.
/// </summary>
/// <param name="trace">Optional trace writer for telemetry</param>
/// <param name="fileTable">Optional file table from the workflow template, for better error messages</param>
/// <param name="features">Optional workflow features</param>
public WorkflowTemplateEvaluator(
ITraceWriter trace,
IList<String> fileTable,
WorkflowFeatures features)
{
m_trace = trace ?? new EmptyTraceWriter();
m_fileTable = fileTable;
m_features = features ?? WorkflowFeatures.GetDefaults();
m_schema = WorkflowSchemaFactory.GetSchema(m_features);
}
/// <summary>
/// Creates a new instance for evaluating tokens within a workflow template.
/// </summary>
/// <param name="trace">Optional trace writer for telemetry</param>
/// <param name="fileTable">Optional file table from the workflow template, for better error messages</param>
/// <param name="features">Optional workflow features</param>
/// <param name="parentMemory">Optional parent memory counter, for byte tracking across evaluation calls.</param>
public WorkflowTemplateEvaluator(
ITraceWriter trace,
IList<String> fileTable,
WorkflowFeatures features,
TemplateMemory parentMemory)
{
m_trace = trace ?? new EmptyTraceWriter();
m_fileTable = fileTable;
m_features = features ?? WorkflowFeatures.GetDefaults();
m_schema = WorkflowSchemaFactory.GetSchema(m_features);
m_parentMemory = parentMemory;
}
public Int32 MaxDepth => 50;
/// <summary>
/// Gets or sets the maximum error message length before the message will be truncated.
/// </summary>
public Int32 MaxErrorMessageLength { get; set; } = 500;
/// <summary>
/// Gets the maximum number of errors that can be recorded when evaluating a workflow template.
/// </summary>
public Int32 MaxErrors => 10;
public Int32 MaxEvents => 1000000; // 1 million
public Int32 MaxResultSize { get; set; } = 10 * 1024 * 1024; // 10 mb
public Boolean EvaluateStageIf(
String stageId,
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions,
IEnumerable<KeyValuePair<String, Object>> expressionState)
{
var result = default(Boolean?);
var errorPrefix = $"Error when evaluating '{WorkflowTemplateConstants.If}' for stage '{stageId}'.";
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions, expressionState);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.JobIfResult, token, 0, null);
context.Errors.Check(errorPrefix);
result = WorkflowTemplateConverter.ConvertToIfResult(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check(errorPrefix);
}
return result ?? throw new InvalidOperationException("Stage if cannot be null");
}
public Boolean EvaluateJobIf(
String jobId,
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions,
IEnumerable<KeyValuePair<String, Object>> expressionState)
{
var result = default(Boolean?);
var errorPrefix = $"Error when evaluating '{WorkflowTemplateConstants.If}' for job '{jobId}'.";
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions, expressionState);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.JobIfResult, token, 0, null);
context.Errors.Check(errorPrefix);
result = WorkflowTemplateConverter.ConvertToIfResult(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check(errorPrefix);
}
return result ?? throw new InvalidOperationException("Job if cannot be null");
}
/// <summary>
/// Evaluates a job strategy token
/// </summary>
/// <param name="jobName">The default job display name (any display name expression is evaluated after strategy)</param>
public Strategy EvaluateStrategy(
String jobId,
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions,
String jobName)
{
var result = new Strategy();
var errorPrefix = $"Error when evaluating '{WorkflowTemplateConstants.Strategy}' for job '{jobId}'.";
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.Strategy, token, 0, null);
context.Errors.Check(errorPrefix);
result = WorkflowTemplateConverter.ConvertToStrategy(context, token, jobName);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check(errorPrefix);
}
if (result.Configurations.Count == 0)
{
var configuration = new StrategyConfiguration
{
Id = WorkflowConstants.DefaultJobName,
Name = new JobNameBuilder(jobName).Build(),
};
configuration.ExpressionData.Add(WorkflowTemplateConstants.Matrix, null);
configuration.ExpressionData.Add(
WorkflowTemplateConstants.Strategy,
new DictionaryExpressionData
{
{
"fail-fast",
new BooleanExpressionData(result.FailFast)
},
{
"job-index",
new NumberExpressionData(0)
},
{
"job-total",
new NumberExpressionData(1)
},
{
"max-parallel",
new NumberExpressionData(1)
}
});
result.Configurations.Add(configuration);
}
return result;
}
public String EvaluateJobName(
String jobId,
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions,
String defaultName)
{
var result = default(String);
var errorPrefix = $"Error when evaluating '{WorkflowTemplateConstants.Name}' for job '{jobId}'.";
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.StringStrategyContext, token, 0, null);
context.Errors.Check(errorPrefix);
result = WorkflowTemplateConverter.ConvertToJobName(context, token);
if (string.IsNullOrEmpty(result))
{
result = defaultName;
context.Memory.AddBytes(defaultName);
}
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check(errorPrefix);
}
return result;
}
public DictionaryExpressionData EvaluateWorkflowJobInputs(
ReusableWorkflowJob workflowJob,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var inputDefinitions = workflowJob.InputDefinitions;
var inputValues = workflowJob.InputValues;
var result = default(DictionaryExpressionData);
if (inputDefinitions != null && inputDefinitions.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
var inputDefinitionsToken = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.WorkflowCallInputs, inputDefinitions, 0, null);
context.Errors.Check();
var inputValuesToken = default(TemplateToken);
if (inputValues != null && inputValues.Type != TokenType.Null)
{
inputValuesToken = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.WorkflowJobWith, inputValues, 0, null);
context.Errors.Check();
}
result = WorkflowTemplateConverter.ConvertToWorkflowJobInputs(context, inputDefinitionsToken, inputValuesToken, workflowJob);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result ?? new DictionaryExpressionData();
}
public IDictionary<String, String> EvaluateWorkflowJobOutputs(
MappingToken outputDefinitions,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(IDictionary<String, String>);
if (outputDefinitions != null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
var outputs = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.WorkflowCallOutputs, outputDefinitions, 0, null);
result = WorkflowTemplateConverter.ConvertToWorkflowJobOutputs(outputs);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result ?? new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase);
}
public ActionsEnvironmentReference EvaluateJobEnvironment(
string jobId,
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(ActionsEnvironmentReference);
var errorPrefix = $"Error when evaluating '{WorkflowTemplateConstants.Environment}' for job '{jobId}'.";
if (token != null && token.Type != TokenType.Null)
{
// Set "addMissingContexts:false" because the environment contains some properties
// that are intended to be evaluated on the server, and others on the runner.
//
// For example:
// environment:
// name: ${{ this evaluates on the server }}
// url: ${{ this evaluates on the runner }}
var context = CreateContext(expressionData, expressionFunctions, addMissingContexts: false);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.JobEnvironment, token, 0, null);
context.Errors.Check(errorPrefix);
result = WorkflowTemplateConverter.ConvertToActionEnvironmentReference(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check(errorPrefix);
}
return result;
}
public TemplateToken EvaluateJobEnvironmentUrl(
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(TemplateToken);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.StringRunnerContextNoSecrets, token, 0, null);
context.Errors.Check();
result = token.AssertString("environment.url");
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
public GroupPermitSetting EvaluateConcurrency(
String jobId,
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(GroupPermitSetting);
string type;
string errorPrefix;
if (String.IsNullOrEmpty(jobId))
{
type = WorkflowTemplateConstants.WorkflowConcurrency;
errorPrefix = $"Error when evaluating '{WorkflowTemplateConstants.Concurrency}'.";
}
else
{
type = WorkflowTemplateConstants.JobConcurrency;
errorPrefix = $"Error when evaluating '{WorkflowTemplateConstants.Concurrency}' for job '{jobId}'.";
}
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, type, token, 0, null);
context.Errors.Check(errorPrefix);
result = WorkflowTemplateConverter.ConvertToConcurrency(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check(errorPrefix);
}
return result;
}
public RunsOn EvaluateRunsOn(
String jobId,
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(RunsOn);
var errorPrefix = $"Error when evaluating '{WorkflowTemplateConstants.RunsOn}' for job '{jobId}'.";
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.RunsOn, token, 0, null);
context.Errors.Check(errorPrefix);
result = WorkflowTemplateConverter.ConvertToRunsOn(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check(errorPrefix);
}
return result ?? throw new InvalidOperationException("Job target cannot be null");
}
public Snapshot EvaluateSnapshot(
String jobId,
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Snapshot);
var errorPrefix = $"Error when evaluating '{WorkflowTemplateConstants.Snapshot}' for job '{jobId}'.";
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.Snapshot, token, 0, null);
context.Errors.Check(errorPrefix);
result = WorkflowTemplateConverter.ConvertToSnapshot(context, token);
}
catch (Exception ex) when (ex is not TemplateValidationException)
{
context.Errors.Add(ex);
}
context.Errors.Check(errorPrefix);
}
return result;
}
public Int32 EvaluateJobTimeout(
String jobId,
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Int32?);
var errorPrefix = $"Error when evaluating '{WorkflowTemplateConstants.TimeoutMinutes}' for job '{jobId}'.";
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.NumberStrategyContext, token, 0, null);
context.Errors.Check(errorPrefix);
result = WorkflowTemplateConverter.ConvertToJobTimeout(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check(errorPrefix);
}
return result ?? WorkflowConstants.DefaultJobTimeoutInMinutes;
}
public Int32 EvaluateJobCancelTimeout(
String jobId,
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Int32?);
var errorPrefix = $"Error when evaluating '{WorkflowTemplateConstants.CancelTimeoutMinutes}' for job '{jobId}'.";
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.NumberStrategyContext, token, 0, null);
context.Errors.Check(errorPrefix);
result = WorkflowTemplateConverter.ConvertToJobCancelTimeout(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check(errorPrefix);
}
return result ?? WorkflowConstants.DefaultJobCancelTimeoutInMinutes;
}
public Boolean EvaluateJobContinueOnError(
String jobId,
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Boolean?);
var errorPrefix = $"Error when evaluating '{WorkflowTemplateConstants.ContinueOnError}' for job '{jobId}'.";
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.BooleanStrategyContext, token, 0, null);
context.Errors.Check(errorPrefix);
result = WorkflowTemplateConverter.ConvertToJobContinueOnError(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check(errorPrefix);
}
return result ?? false;
}
public Boolean EvaluateStepContinueOnError(
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Boolean?);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.BooleanStepsContext, token, 0, null);
context.Errors.Check();
result = WorkflowTemplateConverter.ConvertToStepContinueOnError(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result ?? false;
}
public String EvaluateStepName(
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(String);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.StringStepsContext, token, 0, null);
context.Errors.Check();
result = WorkflowTemplateConverter.ConvertToStepName(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
public Boolean EvaluateStepIf(
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions,
IEnumerable<KeyValuePair<String, Object>> expressionState)
{
var result = default(Boolean?);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions, expressionState);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.StepIfResult, token, 0, null);
context.Errors.Check();
result = WorkflowTemplateConverter.ConvertToIfResult(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result ?? throw new InvalidOperationException("Step if cannot be null");
}
public Dictionary<String, String> EvaluateStepEnvironment(
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions,
StringComparer keyComparer)
{
var result = default(Dictionary<String, String>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.StepEnv, token, 0, null);
context.Errors.Check();
result = WorkflowTemplateConverter.ConvertToStepEnvironment(context, token, keyComparer);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result ?? new Dictionary<String, String>(keyComparer);
}
public Dictionary<String, String> EvaluateStepInputs(
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Dictionary<String, String>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.StepWith, token, 0, null);
context.Errors.Check();
result = WorkflowTemplateConverter.ConvertToStepInputs(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result ?? new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase);
}
public Int32 EvaluateStepTimeout(
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Int32?);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.NumberStepsContext, token, 0, null);
context.Errors.Check();
result = WorkflowTemplateConverter.ConvertToStepTimeout(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result ?? 0;
}
public JobContainer EvaluateJobContainer(
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(JobContainer);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.Container, token, 0, null);
context.Errors.Check();
result = WorkflowTemplateConverter.ConvertToJobContainer(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
public IList<KeyValuePair<String, JobContainer>> EvaluateJobServiceContainers(
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(List<KeyValuePair<String, JobContainer>>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.Services, token, 0, null);
context.Errors.Check();
result = WorkflowTemplateConverter.ConvertToJobServiceContainers(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
public Dictionary<String, String> EvaluateJobDefaultsRun(
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Dictionary<String, String>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.JobDefaultsRun, token, 0, null);
context.Errors.Check();
result = new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase);
var mapping = token.AssertMapping("defaults run");
foreach (var pair in mapping)
{
// Literal key
var key = pair.Key.AssertString("defaults run key");
// Literal value
var value = pair.Value.AssertString("defaults run value");
result[key.Value] = value.Value;
}
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
public Dictionary<String, String> EvaluateJobOutputs(
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Dictionary<String, String>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.JobOutputs, token, 0, null);
context.Errors.Check();
result = new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase);
var mapping = token.AssertMapping("outputs");
foreach (var pair in mapping)
{
// Literal key
var key = pair.Key.AssertString("output key");
// Literal value
var value = pair.Value.AssertString("output value");
result[key.Value] = value.Value;
}
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
public IDictionary<String, String> EvaluateWorkflowJobSecrets(
TemplateToken token,
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(IDictionary<String, String>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(expressionData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, WorkflowTemplateConstants.WorkflowJobSecrets, token, 0, null);
context.Errors.Check();
result = WorkflowTemplateConverter.ConvertToWorkflowJobSecrets(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result ?? new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase);
}
private TemplateContext CreateContext(
DictionaryExpressionData expressionData,
IList<IFunctionInfo> expressionFunctions,
IEnumerable<KeyValuePair<String, Object>> expressionState = null,
Boolean addMissingContexts = true)
{
var result = new TemplateContext
{
CancellationToken = CancellationToken.None,
Errors = new TemplateValidationErrors(MaxErrors, MaxErrorMessageLength),
Memory = new TemplateMemory(
maxDepth: MaxDepth,
maxEvents: MaxEvents,
maxBytes: MaxResultSize,
parent: m_parentMemory),
Schema = m_schema,
StrictJsonParsing = m_features.StrictJsonParsing,
TraceWriter = m_trace,
};
result.SetFeatures(m_features);
// Add the file table
if (m_fileTable?.Count > 0)
{
foreach (var file in m_fileTable)
{
result.GetFileId(file);
}
}
// Add named values
if (expressionData != null)
{
foreach (var pair in expressionData)
{
result.ExpressionValues[pair.Key] = pair.Value;
}
}
// Add functions
var functionNames = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
if (expressionFunctions?.Count > 0)
{
foreach (var function in expressionFunctions)
{
result.ExpressionFunctions.Add(function);
functionNames.Add(function.Name);
}
}
// Add missing expression values and expression functions.
// This solves the following problems:
// - Compat for new agent against old server (new contexts not sent down in job message)
// - Evaluating early when all referenced contexts are available, even though all allowed
// contexts may not yet be available. For example, evaluating step name can often
// be performed early.
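// e.g. a step name like "Build ${{ matrix.os }}" only references "matrix", so it can be
// evaluated before the "steps" context is available; the null placeholders and no-op
// functions added below keep such early evaluation from failing on contexts that are
// allowed but not yet known.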
if (addMissingContexts)
{
foreach (var name in s_expressionValueNames)
{
if (!result.ExpressionValues.ContainsKey(name))
{
result.ExpressionValues[name] = null;
}
}
foreach (var name in s_expressionFunctionNames)
{
if (!functionNames.Contains(name))
{
result.ExpressionFunctions.Add(new FunctionInfo<NoOperation>(name, 0, Int32.MaxValue));
}
}
}
// Add the "vars" context even when addMissingContexts is false, to avoid
// job environment evaluation errors
if (!result.ExpressionValues.ContainsKey(WorkflowTemplateConstants.Vars))
{
result.ExpressionValues[WorkflowTemplateConstants.Vars] = null;
}
// Add state
if (expressionState != null)
{
foreach (var pair in expressionState)
{
result.State[pair.Key] = pair.Value;
}
}
return result;
}
private readonly ITraceWriter m_trace;
private readonly TemplateSchema m_schema;
private readonly IList<String> m_fileTable;
private readonly WorkflowFeatures m_features;
private readonly TemplateMemory m_parentMemory;
private static readonly String[] s_expressionValueNames = new[]
{
WorkflowTemplateConstants.GitHub,
WorkflowTemplateConstants.Needs,
WorkflowTemplateConstants.Strategy,
WorkflowTemplateConstants.Matrix,
WorkflowTemplateConstants.Secrets,
WorkflowTemplateConstants.Vars,
WorkflowTemplateConstants.Steps,
WorkflowTemplateConstants.Inputs,
WorkflowTemplateConstants.Jobs,
WorkflowTemplateConstants.Job,
WorkflowTemplateConstants.Runner,
WorkflowTemplateConstants.Env,
};
private static readonly String[] s_expressionFunctionNames = new[]
{
WorkflowTemplateConstants.Always,
WorkflowTemplateConstants.Cancelled,
WorkflowTemplateConstants.Failure,
WorkflowTemplateConstants.HashFiles,
WorkflowTemplateConstants.Success,
};
}
}
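
A minimal usage sketch (not from this commit's tests; the token and file table variables are assumptions):

// "nameToken" stands in for a step "name" token parsed from a workflow, and
// "fileTable" for WorkflowTemplate.FileTable; both are assumptions for this sketch.
var evaluator = new WorkflowTemplateEvaluator(
    trace: null,       // falls back to EmptyTraceWriter
    fileTable: fileTable,
    features: null);   // falls back to WorkflowFeatures.GetDefaults()

// Named values visible to ${{ }} expressions during evaluation.
var contextData = new DictionaryExpressionData
{
    { WorkflowTemplateConstants.GitHub, new DictionaryExpressionData() },
};

// Invalid expressions surface as a TemplateValidationException via context.Errors.Check();
// a null or null-typed token yields a null result.
var stepName = evaluator.EvaluateStepName(nameToken, contextData, expressionFunctions: null);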

View File

@@ -0,0 +1,134 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Threading;
using GitHub.Actions.WorkflowParser.Conversion;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
namespace GitHub.Actions.WorkflowParser
{
using GitHub.Actions.WorkflowParser.ObjectTemplating;
/// <summary>
/// Parses a workflow YAML file.
/// </summary>
public sealed class WorkflowTemplateParser
{
public WorkflowTemplateParser(
IServerTraceWriter serverTrace,
ITraceWriter trace,
ParseOptions options,
WorkflowFeatures features)
{
m_serverTrace = serverTrace ?? new EmptyServerTraceWriter();
m_trace = trace ?? new EmptyTraceWriter();
m_parseOptions = new ParseOptions(options ?? throw new ArgumentNullException(nameof(options)));
m_features = features ?? WorkflowFeatures.GetDefaults();
}
/// <summary>
/// Loads the YAML workflow template
/// </summary>
public WorkflowTemplate LoadWorkflow(
IFileProvider fileProvider,
String path,
String permissionPolicy,
IDictionary<string, ReferencedWorkflow> referencedWorkflows,
CancellationToken cancellationToken)
{
(var result, _) = LoadWorkflowInternal(fileProvider, path, permissionPolicy, referencedWorkflows, cancellationToken);
return result;
}
/// <summary>
/// Loads the YAML workflow template and returns the estimated number of bytes consumed in memory (for cross-language unit tests)
/// </summary>
/// <returns>The workflow template, and the estimated number of bytes consumed in memory</returns>
internal (WorkflowTemplate, int) LoadWorkflowInternal(
IFileProvider fileProvider,
String path,
String permissionPolicy,
IDictionary<string, ReferencedWorkflow> referencedWorkflows,
CancellationToken cancellationToken)
{
fileProvider = fileProvider ?? throw new ArgumentNullException(nameof(fileProvider));
TemplateContext context;
YamlTemplateLoader loader;
TemplateToken tokens;
// Parse template tokens
(context, loader, tokens) = ParseTemplate(fileProvider, path, cancellationToken);
var usage = new WorkflowUsage(m_serverTrace);
try
{
// Gather telemetry
usage.Gather(context, tokens);
// Convert to workflow types
var workflowTemplate = WorkflowTemplateConverter.ConvertToWorkflow(context, tokens);
// Set telemetry
workflowTemplate.Telemetry = context.Telemetry;
// Load reusable workflows
ReusableWorkflowsLoader.Load(m_serverTrace, m_trace, m_parseOptions, usage, context, workflowTemplate, loader, permissionPolicy, referencedWorkflows);
// Error state? Throw away the model
if (workflowTemplate.Errors.Count > 0)
{
var errorTemplate = new WorkflowTemplate();
errorTemplate.Errors.AddRange(workflowTemplate.Errors);
errorTemplate.FileTable.AddRange(workflowTemplate.FileTable);
errorTemplate.Telemetry = context.Telemetry;
workflowTemplate = errorTemplate;
}
return (workflowTemplate, context.Memory.CurrentBytes);
}
finally
{
usage.Trace();
}
}
/// <summary>
/// Parses a workflow template file.
/// </summary>
private (TemplateContext, YamlTemplateLoader, TemplateToken) ParseTemplate(
IFileProvider fileProvider,
String path,
CancellationToken cancellationToken)
{
// Setup the template context
var context = new TemplateContext
{
CancellationToken = cancellationToken,
Errors = new TemplateValidationErrors(m_parseOptions.MaxErrors, m_parseOptions.MaxErrorMessageLength),
Memory = new TemplateMemory(
maxDepth: m_parseOptions.MaxDepth,
maxEvents: m_parseOptions.MaxParseEvents,
maxBytes: m_parseOptions.MaxResultSize),
Schema = WorkflowSchemaFactory.GetSchema(m_features),
TraceWriter = m_trace,
};
context.SetFeatures(m_features);
context.SetJobCountValidator(new JobCountValidator(context, m_parseOptions.MaxJobLimit));
// Setup the template loader
var loader = new YamlTemplateLoader(new ParseOptions(m_parseOptions), fileProvider);
// Parse the template tokens
var tokens = loader.ParseWorkflow(context, path);
return (context, loader, tokens);
}
private readonly WorkflowFeatures m_features;
private readonly ParseOptions m_parseOptions;
private readonly IServerTraceWriter m_serverTrace;
private readonly ITraceWriter m_trace;
}
}
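
A rough end-to-end sketch (the file provider, parse options, permission policy, and workflow path are assumptions; none are defined in this excerpt):

// "fileProvider" (an IFileProvider) and "options" (a ParseOptions) are assumed to be
// configured elsewhere; the path and empty referenced-workflow map are placeholders.
var parser = new WorkflowTemplateParser(
    serverTrace: null,   // falls back to EmptyServerTraceWriter
    trace: null,         // falls back to EmptyTraceWriter
    options: options,    // required; the constructor throws on null
    features: null);     // falls back to WorkflowFeatures.GetDefaults()

var template = parser.LoadWorkflow(
    fileProvider,
    ".github/workflows/ci.yml",
    permissionPolicy: null,   // assumption: accepted values are not shown in this diff
    referencedWorkflows: new Dictionary<string, ReferencedWorkflow>(StringComparer.OrdinalIgnoreCase),
    cancellationToken: CancellationToken.None);

// Parse errors are recorded on the template rather than thrown; CheckErrors raises
// WorkflowValidationException if any were recorded.
template.CheckErrors();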

Some files were not shown because too many files have changed in this diff