Mirror of https://github.com/actions/runner.git, synced 2025-12-20 06:29:53 +00:00
Compare updated template evaluator (#4092)
14 src/Sdk/WorkflowParser/Conversion/EmptyServerTraceWriter.cs Normal file
@@ -0,0 +1,14 @@
using System;

namespace GitHub.Actions.WorkflowParser.Conversion
{
    internal sealed class EmptyServerTraceWriter : IServerTraceWriter
    {
        public void TraceAlways(
            Int32 tracepoint,
            String format,
            params Object[] arguments)
        {
        }
    }
}
183 src/Sdk/WorkflowParser/Conversion/IdBuilder.cs Normal file
@@ -0,0 +1,183 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Text;

namespace GitHub.Actions.WorkflowParser.Conversion
{
    /// <summary>
    /// Builder for job and step IDs
    /// </summary>
    internal sealed class IdBuilder
    {
        internal void AppendSegment(String value)
        {
            if (String.IsNullOrEmpty(value))
            {
                return;
            }

            if (m_name.Length == 0)
            {
                var first = value[0];
                if ((first >= 'a' && first <= 'z') ||
                    (first >= 'A' && first <= 'Z') ||
                    first == '_')
                {
                    // Legal first char
                }
                else if ((first >= '0' && first <= '9') || first == '-')
                {
                    // Illegal first char, but legal char.
                    // Prepend "_".
                    m_name.Append("_");
                }
                else
                {
                    // Illegal char
                }
            }
            else
            {
                // Separator
                m_name.Append(c_separator);
            }

            foreach (var c in value)
            {
                if ((c >= 'a' && c <= 'z') ||
                    (c >= 'A' && c <= 'Z') ||
                    (c >= '0' && c <= '9') ||
                    c == '_' ||
                    c == '-')
                {
                    // Legal
                    m_name.Append(c);
                }
                else
                {
                    // Illegal
                    m_name.Append("_");
                }
            }
        }

        /// <summary>
        /// Builds the ID from the segments
        /// </summary>
        /// <param name="allowReservedPrefix">When true, generated IDs may begin with "__" depending upon the segments
        /// and collisions with known IDs. When false, generated IDs will never begin with the reserved prefix "__".</param>
        /// <param name="maxLength">The maximum length of the generated ID.</param>
        internal String Build(
            Boolean allowReservedPrefix,
            Int32 maxLength = WorkflowConstants.MaxNodeNameLength)
        {
            // Ensure reasonable max length
            if (maxLength <= 5) // Must be long enough to accommodate at least one character + length of max suffix "_999" (refer suffix logic further below)
            {
                maxLength = WorkflowConstants.MaxNodeNameLength;
            }

            var original = m_name.Length > 0 ? m_name.ToString() : "job";

            // Avoid prefix "__" when not allowed
            if (!allowReservedPrefix && original.StartsWith("__", StringComparison.Ordinal))
            {
                original = $"_{original.TrimStart('_')}";
            }

            var attempt = 1;
            var suffix = default(String);
            while (true)
            {
                if (attempt == 1)
                {
                    suffix = String.Empty;
                }
                else if (attempt < 1000)
                {
                    // Special case to avoid prefix "__" when not allowed
                    if (!allowReservedPrefix && String.Equals(original, "_", StringComparison.Ordinal))
                    {
                        suffix = String.Format(CultureInfo.InvariantCulture, "{0}", attempt);
                    }
                    else
                    {
                        suffix = String.Format(CultureInfo.InvariantCulture, "_{0}", attempt);
                    }
                }
                else
                {
                    throw new InvalidOperationException("Unable to create a unique name");
                }

                var candidate = original.Substring(0, Math.Min(original.Length, maxLength - suffix.Length)) + suffix;

                if (m_distinctNames.Add(candidate))
                {
                    m_name.Clear();
                    return candidate;
                }

                attempt++;
            }
        }

        internal Boolean TryAddKnownId(
            String value,
            out String error)
        {
            if (String.IsNullOrEmpty(value) ||
                !IsValid(value) ||
                value.Length >= WorkflowConstants.MaxNodeNameLength)
            {
                error = $"The identifier '{value}' is invalid. IDs may only contain alphanumeric characters, '_', and '-'. IDs must start with a letter or '_' and must be less than {WorkflowConstants.MaxNodeNameLength} characters.";
                return false;
            }
            else if (value.StartsWith("__", StringComparison.Ordinal))
            {
                error = $"The identifier '{value}' is invalid. IDs starting with '__' are reserved.";
                return false;
            }
            else if (!m_distinctNames.Add(value))
            {
                error = $"The identifier '{value}' may not be used more than once within the same scope.";
                return false;
            }
            else
            {
                error = null;
                return true;
            }
        }

        private static Boolean IsValid(String name)
        {
            var result = true;
            for (Int32 i = 0; i < name.Length; i++)
            {
                if ((name[i] >= 'a' && name[i] <= 'z') ||
                    (name[i] >= 'A' && name[i] <= 'Z') ||
                    (name[i] >= '0' && name[i] <= '9' && i > 0) ||
                    (name[i] == '_') ||
                    (name[i] == '-' && i > 0))
                {
                    continue;
                }
                else
                {
                    result = false;
                    break;
                }
            }

            return result;
        }

        private const String c_separator = "_";
        private readonly HashSet<String> m_distinctNames = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
        private readonly StringBuilder m_name = new StringBuilder();
    }
}
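The sanitization and suffix rules above are easiest to see end to end in a small standalone sketch. This is illustrative only, not the production IdBuilder: it assumes ASCII input, hard-codes the "job" fallback and "_" separator, and omits the max-length and reserved-prefix handling.

using System;
using System.Collections.Generic;
using System.Text;

static class IdBuilderSketch
{
    // Keep [A-Za-z0-9_-], replace anything else with '_', and prepend '_' when the
    // very first character of the ID would be a digit or '-'.
    static string Sanitize(string value, bool isFirstSegment)
    {
        var sb = new StringBuilder();
        if (isFirstSegment && value.Length > 0 && (char.IsDigit(value[0]) || value[0] == '-'))
        {
            sb.Append('_');
        }
        foreach (var c in value)
        {
            var legal = (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_' || c == '-';
            sb.Append(legal ? c : '_');
        }
        return sb.ToString();
    }

    static void Main()
    {
        var distinctNames = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        string Build(params string[] segments)
        {
            var sanitized = new List<string>();
            for (var i = 0; i < segments.Length; i++)
            {
                sanitized.Add(Sanitize(segments[i], i == 0));
            }
            var original = sanitized.Count > 0 ? string.Join("_", sanitized) : "job";

            // First attempt has no suffix; collisions get _2, _3, ...
            for (var attempt = 1; ; attempt++)
            {
                var candidate = attempt == 1 ? original : $"{original}_{attempt}";
                if (distinctNames.Add(candidate))
                {
                    return candidate;
                }
            }
        }

        Console.WriteLine(Build("ubuntu-latest", "node 18")); // ubuntu-latest_node_18
        Console.WriteLine(Build("ubuntu-latest", "node 18")); // ubuntu-latest_node_18_2
        Console.WriteLine(Build("3.11", "debug"));            // _3_11_debug
    }
}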
44 src/Sdk/WorkflowParser/Conversion/JobCountValidator.cs Normal file
@@ -0,0 +1,44 @@
#nullable enable

using System;
using GitHub.Actions.WorkflowParser.ObjectTemplating;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;

namespace GitHub.Actions.WorkflowParser.Conversion
{
    internal sealed class JobCountValidator
    {
        public JobCountValidator(
            TemplateContext context,
            Int32 maxCount)
        {
            m_context = context ?? throw new ArgumentNullException(nameof(context));
            m_maxCount = maxCount;
        }

        /// <summary>
        /// Increments the job counter.
        ///
        /// Appends an error to the template context only when the max job count is initially exceeded.
        /// Additional calls will not append more errors.
        /// </summary>
        /// <param name="token">The token to use for error reporting.</param>
        public void Increment(TemplateToken? token)
        {
            // Initial breach?
            if (m_maxCount > 0 &&
                m_count + 1 > m_maxCount &&
                m_count <= m_maxCount)
            {
                m_context.Error(token, $"Workflows may not contain more than {m_maxCount} jobs across all referenced files");
            }

            // Increment
            m_count++;
        }

        private readonly TemplateContext m_context;
        private readonly Int32 m_maxCount;
        private Int32 m_count;
    }
}
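The "report only the first breach" behavior described above can be checked with a few plain increments; the sketch below inlines the same guard with made-up numbers.

using System;

static class JobCountSketch
{
    static void Main()
    {
        var maxCount = 2;
        var count = 0;
        for (var i = 0; i < 4; i++)
        {
            // Same condition as Increment(): fire only when the limit is first crossed.
            if (maxCount > 0 && count + 1 > maxCount && count <= maxCount)
            {
                Console.WriteLine($"Workflows may not contain more than {maxCount} jobs across all referenced files");
            }
            count++;
        }
        // The message prints exactly once (on the third increment), not on every later call.
    }
}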
64 src/Sdk/WorkflowParser/Conversion/JobNameBuilder.cs Normal file
@@ -0,0 +1,64 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references

using System;
using System.Collections.Generic;
using System.Globalization;

namespace GitHub.Actions.WorkflowParser.Conversion
{
    /// <summary>
    /// Builder for job display names. Used when appending strategy configuration values to build a display name.
    /// </summary>
    internal sealed class JobNameBuilder
    {
        public JobNameBuilder(String jobName)
        {
            if (!String.IsNullOrEmpty(jobName))
            {
                m_jobName = jobName;
                m_segments = new List<String>();
            }
        }

        public void AppendSegment(String value)
        {
            if (String.IsNullOrEmpty(value) || m_segments == null)
            {
                return;
            }

            m_segments.Add(value);
        }

        public String Build()
        {
            if (String.IsNullOrEmpty(m_jobName))
            {
                return null;
            }

            var name = default(String);
            if (m_segments.Count == 0)
            {
                name = m_jobName;
            }
            else
            {
                var joinedSegments = String.Join(", ", m_segments);
                name = String.Format(CultureInfo.InvariantCulture, "{0} ({1})", m_jobName, joinedSegments);
            }

            const Int32 maxNameLength = 100;
            if (name.Length > maxNameLength)
            {
                name = name.Substring(0, maxNameLength - 3) + "...";
            }

            m_segments.Clear();
            return name;
        }

        private readonly String m_jobName;
        private readonly List<String> m_segments;
    }
}
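As a quick illustration of the formatting rule above, a display name is the job name followed by the appended segments in parentheses, truncated to 100 characters. A minimal standalone sketch (illustrative names only):

using System;
using System.Collections.Generic;
using System.Globalization;

static class JobNameSketch
{
    static string BuildDisplayName(string jobName, IReadOnlyList<string> segments)
    {
        var name = segments.Count == 0
            ? jobName
            : string.Format(CultureInfo.InvariantCulture, "{0} ({1})", jobName, string.Join(", ", segments));

        const int maxNameLength = 100;
        return name.Length > maxNameLength ? name.Substring(0, maxNameLength - 3) + "..." : name;
    }

    static void Main()
    {
        // e.g. a job named "build" with matrix values x64 and linux
        Console.WriteLine(BuildDisplayName("build", new[] { "x64", "linux" })); // build (x64, linux)
    }
}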
236 src/Sdk/WorkflowParser/Conversion/JsonObjectReader.cs Normal file
@@ -0,0 +1,236 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references

using System;
using System.Collections.Generic;
using GitHub.Actions.WorkflowParser.ObjectTemplating;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
using Newtonsoft.Json.Linq;

namespace GitHub.Actions.WorkflowParser.Conversion
{
    internal sealed class JsonObjectReader : IObjectReader
    {
        internal JsonObjectReader(
            Int32? fileId,
            String input)
        {
            m_fileId = fileId;
            var token = JToken.Parse(input);
            m_enumerator = GetEvents(token, true).GetEnumerator();
            m_enumerator.MoveNext();
        }

        public Boolean AllowLiteral(out LiteralToken literal)
        {
            var current = m_enumerator.Current;
            switch (current.Type)
            {
                case ParseEventType.Null:
                    literal = new NullToken(m_fileId, current.Line, current.Column);
                    m_enumerator.MoveNext();
                    return true;

                case ParseEventType.Boolean:
                    literal = new BooleanToken(m_fileId, current.Line, current.Column, (Boolean)current.Value);
                    m_enumerator.MoveNext();
                    return true;

                case ParseEventType.Number:
                    literal = new NumberToken(m_fileId, current.Line, current.Column, (Double)current.Value);
                    m_enumerator.MoveNext();
                    return true;

                case ParseEventType.String:
                    literal = new StringToken(m_fileId, current.Line, current.Column, (String)current.Value);
                    m_enumerator.MoveNext();
                    return true;
            }

            literal = null;
            return false;
        }

        public Boolean AllowSequenceStart(out SequenceToken sequence)
        {
            var current = m_enumerator.Current;
            if (current.Type == ParseEventType.SequenceStart)
            {
                sequence = new SequenceToken(m_fileId, current.Line, current.Column);
                m_enumerator.MoveNext();
                return true;
            }

            sequence = null;
            return false;
        }

        public Boolean AllowSequenceEnd()
        {
            if (m_enumerator.Current.Type == ParseEventType.SequenceEnd)
            {
                m_enumerator.MoveNext();
                return true;
            }

            return false;
        }

        public Boolean AllowMappingStart(out MappingToken mapping)
        {
            var current = m_enumerator.Current;
            if (current.Type == ParseEventType.MappingStart)
            {
                mapping = new MappingToken(m_fileId, current.Line, current.Column);
                m_enumerator.MoveNext();
                return true;
            }

            mapping = null;
            return false;
        }

        public Boolean AllowMappingEnd()
        {
            if (m_enumerator.Current.Type == ParseEventType.MappingEnd)
            {
                m_enumerator.MoveNext();
                return true;
            }

            return false;
        }

        /// <summary>
        /// Consumes the last parsing events, which are expected to be DocumentEnd and StreamEnd.
        /// </summary>
        public void ValidateEnd()
        {
            if (m_enumerator.Current.Type == ParseEventType.DocumentEnd)
            {
                m_enumerator.MoveNext();
                return;
            }

            throw new InvalidOperationException("Expected end of reader");
        }

        /// <summary>
        /// Consumes the first parsing events, which are expected to be StreamStart and DocumentStart.
        /// </summary>
        public void ValidateStart()
        {
            if (m_enumerator.Current.Type == ParseEventType.DocumentStart)
            {
                m_enumerator.MoveNext();
                return;
            }

            throw new InvalidOperationException("Expected start of reader");
        }

        private IEnumerable<ParseEvent> GetEvents(
            JToken token,
            Boolean root = false)
        {
            if (root)
            {
                yield return new ParseEvent(0, 0, ParseEventType.DocumentStart);
            }

            var lineInfo = token as Newtonsoft.Json.IJsonLineInfo;
            var line = lineInfo.LineNumber;
            var column = lineInfo.LinePosition;

            switch (token.Type)
            {
                case JTokenType.Null:
                    yield return new ParseEvent(line, column, ParseEventType.Null, null);
                    break;

                case JTokenType.Boolean:
                    yield return new ParseEvent(line, column, ParseEventType.Boolean, token.ToObject<Boolean>());
                    break;

                case JTokenType.Float:
                case JTokenType.Integer:
                    yield return new ParseEvent(line, column, ParseEventType.Number, token.ToObject<Double>());
                    break;

                case JTokenType.String:
                    yield return new ParseEvent(line, column, ParseEventType.String, token.ToObject<String>());
                    break;

                case JTokenType.Array:
                    yield return new ParseEvent(line, column, ParseEventType.SequenceStart);
                    foreach (var item in (token as JArray))
                    {
                        foreach (var e in GetEvents(item))
                        {
                            yield return e;
                        }
                    }
                    yield return new ParseEvent(line, column, ParseEventType.SequenceEnd);
                    break;

                case JTokenType.Object:
                    yield return new ParseEvent(line, column, ParseEventType.MappingStart);
                    foreach (var pair in (token as JObject))
                    {
                        yield return new ParseEvent(line, column, ParseEventType.String, pair.Key ?? String.Empty);
                        foreach (var e in GetEvents(pair.Value))
                        {
                            yield return e;
                        }
                    }
                    yield return new ParseEvent(line, column, ParseEventType.MappingEnd);
                    break;

                default:
                    throw new NotSupportedException($"Unexpected JTokenType {token.Type}");
            }

            if (root)
            {
                yield return new ParseEvent(0, 0, ParseEventType.DocumentEnd);
            }
        }

        private struct ParseEvent
        {
            public ParseEvent(
                Int32 line,
                Int32 column,
                ParseEventType type,
                Object value = null)
            {
                Line = line;
                Column = column;
                Type = type;
                Value = value;
            }

            public readonly Int32 Line;
            public readonly Int32 Column;
            public readonly ParseEventType Type;
            public readonly Object Value;
        }

        private enum ParseEventType
        {
            None = 0,
            Null,
            Boolean,
            Number,
            String,
            SequenceStart,
            SequenceEnd,
            MappingStart,
            MappingEnd,
            DocumentStart,
            DocumentEnd,
        }

        private IEnumerator<ParseEvent> m_enumerator;
        private Int32? m_fileId;
    }
}
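To make the event stream concrete, the standalone sketch below walks a small JSON document with Newtonsoft.Json and prints events in the same order the reader above yields them; the document contents are made up for illustration.

using System;
using Newtonsoft.Json.Linq;

static class ParseEventSketch
{
    // Flatten a JToken into the event order used above:
    // DocumentStart, MappingStart, key, value, ..., MappingEnd, DocumentEnd.
    static void Walk(JToken token)
    {
        switch (token.Type)
        {
            case JTokenType.Object:
                Console.WriteLine("MappingStart");
                foreach (var pair in (JObject)token)
                {
                    Console.WriteLine($"String {pair.Key}");
                    Walk(pair.Value);
                }
                Console.WriteLine("MappingEnd");
                break;

            case JTokenType.Array:
                Console.WriteLine("SequenceStart");
                foreach (var item in (JArray)token)
                {
                    Walk(item);
                }
                Console.WriteLine("SequenceEnd");
                break;

            default:
                Console.WriteLine($"{token.Type} {token}");
                break;
        }
    }

    static void Main()
    {
        Console.WriteLine("DocumentStart");
        Walk(JToken.Parse("{\"on\": \"push\", \"jobs\": [1, 2]}"));
        Console.WriteLine("DocumentEnd");
    }
}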
738 src/Sdk/WorkflowParser/Conversion/MatrixBuilder.cs Normal file
@@ -0,0 +1,738 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references

using System;
using System.Collections.Generic;
using GitHub.Actions.Expressions;
using GitHub.Actions.Expressions.Data;
using GitHub.Actions.Expressions.Sdk;
using GitHub.Actions.WorkflowParser.ObjectTemplating;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;

namespace GitHub.Actions.WorkflowParser.Conversion
{
    /// <summary>
    /// Used to build a matrix cross product and apply include/exclude filters.
    /// </summary>
    internal sealed class MatrixBuilder
    {
        internal MatrixBuilder(
            TemplateContext context,
            String jobName)
        {
            m_context = context;
            m_jobName = jobName;
        }

        /// <summary>
        /// Adds an input vector. <c ref="Build" /> creates a cross product from all input vectors.
        ///
        /// For example, given the matrix:
        ///   arch: [x64, x86]
        ///   os: [linux, windows]
        ///
        /// This method should be called twice:
        ///   AddVector("arch", ...);
        ///   AddVector("os", ...)
        /// </summary>
        internal void AddVector(
            String name,
            SequenceToken vector)
        {
            m_vectors.Add(name, vector.ToExpressionData());
        }

        /// <summary>
        /// Adds the sequence containing all exclude mappings.
        /// </summary>
        internal void Exclude(SequenceToken exclude)
        {
            m_excludeSequence = exclude;
        }

        /// <summary>
        /// Adds the sequence containing all include mappings.
        /// </summary>
        internal void Include(SequenceToken include)
        {
            m_includeSequence = include;
        }

        /// <summary>
        /// Builds the matrix.
        ///
        /// In addition to computing the cross product of all input vectors, this method also:
        /// 1. Applies all exclude filters against each cross product vector
        /// 2. Applies all include filters against each cross product vector, which may
        ///    add additional values into existing vectors
        /// 3. Appends all unmatched include vectors, as additional result vectors
        ///
        /// Example 1, simple cross product:
        ///   arch: [x64, x86]
        ///   os: [linux, windows]
        /// The result would contain the following vectors:
        ///   [arch: x64, os: linux]
        ///   [arch: x64, os: windows]
        ///   [arch: x86, os: linux]
        ///   [arch: x86, os: windows]
        ///
        /// Example 2, using exclude filter:
        ///   arch: [x64, x86]
        ///   os: [linux, windows]
        ///   exclude:
        ///   - arch: x86
        ///     os: linux
        /// The result would contain the following vectors:
        ///   [arch: x64, os: linux]
        ///   [arch: x64, os: windows]
        ///   [arch: x86, os: windows]
        ///
        /// Example 3, using include filter to add additional values:
        ///   arch: [x64, x86]
        ///   os: [linux, windows]
        ///   include:
        ///   - arch: x64
        ///     os: linux
        ///     publish: true
        /// The result would contain the following vectors:
        ///   [arch: x64, os: linux, publish: true]
        ///   [arch: x64, os: windows]
        ///   [arch: x86, os: linux]
        ///   [arch: x86, os: windows]
        ///
        /// Example 4, include additional vectors:
        ///   arch: [x64, x86]
        ///   os: [linux, windows]
        ///   include:
        ///   - arch: x64
        ///   - os: macos
        /// The result would contain the following vectors:
        ///   [arch: x64, os: linux]
        ///   [arch: x64, os: windows]
        ///   [arch: x86, os: linux]
        ///   [arch: x86, os: windows]
        ///   [arch: x64, os: macos]
        /// </summary>
        /// <returns>One strategy configuration per result vector</returns>
        internal IEnumerable<StrategyConfiguration> Build()
        {
            // Parse includes/excludes
            var include = new MatrixInclude(m_context, m_vectors, m_includeSequence);
            var exclude = new MatrixExclude(m_context, m_vectors, m_excludeSequence);

            // Calculate the cross product size
            int productSize;
            if (m_vectors.Count > 0)
            {
                productSize = 1;
                foreach (var vectorPair in m_vectors)
                {
                    checked
                    {
                        var vector = vectorPair.Value.AssertArray("vector");
                        productSize *= vector.Count;
                    }
                }
            }
            else
            {
                productSize = 0;
            }

            var idBuilder = new IdBuilder();

            // Cross product vectors
            for (var productIndex = 0; productIndex < productSize; productIndex++)
            {
                // Matrix
                var matrix = new DictionaryExpressionData();
                var blockSize = productSize;
                foreach (var vectorPair in m_vectors)
                {
                    var vectorName = vectorPair.Key;
                    var vector = vectorPair.Value.AssertArray("vector");
                    blockSize = blockSize / vector.Count;
                    var vectorIndex = (productIndex / blockSize) % vector.Count;
                    matrix.Add(vectorName, vector[vectorIndex]);
                }

                // Exclude
                if (exclude.Match(matrix))
                {
                    continue;
                }

                // Include extra values in the vector
                include.Match(matrix, out var extra);

                // Create the configuration
                yield return CreateConfiguration(idBuilder, matrix, extra);
            }

            // Explicit vectors
            foreach (var matrix in include.GetUnmatchedVectors())
            {
                yield return CreateConfiguration(idBuilder, matrix, null);
            }
        }

        private StrategyConfiguration CreateConfiguration(
            IdBuilder idBuilder,
            DictionaryExpressionData matrix,
            DictionaryExpressionData extra)
        {
            // New configuration
            var configuration = new StrategyConfiguration();
            m_context.Memory.AddBytes(TemplateMemory.MinObjectSize);

            // Gather segments for ID and display name
            var nameBuilder = new JobNameBuilder(m_jobName);
            foreach (var matrixData in matrix.Traverse(omitKeys: true))
            {
                var segment = default(String);
                if (matrixData is BooleanExpressionData || matrixData is NumberExpressionData || matrixData is StringExpressionData)
                {
                    segment = matrixData.ToString();
                }

                if (!String.IsNullOrEmpty(segment))
                {
                    // ID segment
                    idBuilder.AppendSegment(segment);

                    // Display name segment
                    nameBuilder.AppendSegment(segment);
                }
            }

            // Id
            configuration.Id = idBuilder.Build(allowReservedPrefix: false, maxLength: m_context.GetFeatures().ShortMatrixIds ? 25 : WorkflowConstants.MaxNodeNameLength);
            m_context.Memory.AddBytes(configuration.Id);

            // Display name
            configuration.Name = nameBuilder.Build();
            m_context.Memory.AddBytes(configuration.Name);

            // Extra values
            if (extra?.Count > 0)
            {
                matrix.Add(extra);
            }

            // Matrix context
            configuration.ExpressionData.Add(WorkflowTemplateConstants.Matrix, matrix);
            m_context.Memory.AddBytes(WorkflowTemplateConstants.Matrix);
            m_context.Memory.AddBytes(matrix, traverse: true);

            return configuration;
        }

        /// <summary>
        /// Represents the sequence "strategy.matrix.include"
        /// </summary>
        private sealed class MatrixInclude
        {
            public MatrixInclude(
                TemplateContext context,
                DictionaryExpressionData vectors,
                SequenceToken includeSequence)
            {
                // Convert to includes sets
                if (includeSequence?.Count > 0)
                {
                    foreach (var includeItem in includeSequence)
                    {
                        var includeMapping = includeItem.AssertMapping("matrix includes item");

                        // Distinguish filters versus extra
                        var filter = new MappingToken(null, null, null);
                        var extra = new DictionaryExpressionData();
                        foreach (var includePair in includeMapping)
                        {
                            var includeKeyLiteral = includePair.Key.AssertString("matrix include item key");
                            if (vectors.ContainsKey(includeKeyLiteral.Value))
                            {
                                filter.Add(includeKeyLiteral, includePair.Value);
                            }
                            else
                            {
                                extra.Add(includeKeyLiteral.Value, includePair.Value.ToExpressionData());
                            }
                        }

                        // At least one filter or extra
                        if (filter.Count == 0 && extra.Count == 0)
                        {
                            context.Error(includeMapping, $"Matrix include mapping does not contain any values");
                            continue;
                        }

                        // Add filter
                        m_filters.Add(new MatrixIncludeFilter(filter, extra));
                    }
                }

                m_matches = new Boolean[m_filters.Count];
            }

            /// <summary>
            /// Matches a vector from the cross product against each include filter.
            ///
            /// For example, given the matrix:
            ///   arch: [x64, x86]
            ///   config: [release, debug]
            ///   include:
            ///   - arch: x64
            ///     config: release
            ///     publish: true
            ///
            /// This method would return the following:
            ///   Match(
            ///     matrix: {arch: x64, config: release},
            ///     out extra: {publish: true})
            ///   => true
            ///
            ///   Match(
            ///     matrix: {arch: x64, config: debug},
            ///     out extra: null)
            ///   => false
            ///
            ///   Match(
            ///     matrix: {arch: x86, config: release},
            ///     out extra: null)
            ///   => false
            ///
            ///   Match(
            ///     matrix: {arch: x86, config: debug},
            ///     out extra: null)
            ///   => false
            /// </summary>
            /// <param name="matrix">A vector of the cross product</param>
            /// <param name="extra">Extra values to add to the vector</param>
            /// <returns>True if the vector matched at least one include filter</returns>
            public Boolean Match(
                DictionaryExpressionData matrix,
                out DictionaryExpressionData extra)
            {
                extra = default(DictionaryExpressionData);
                for (var i = 0; i < m_filters.Count; i++)
                {
                    var filter = m_filters[i];
                    if (filter.Match(matrix, out var items))
                    {
                        m_matches[i] = true;

                        if (extra == null)
                        {
                            extra = new DictionaryExpressionData();
                        }

                        foreach (var pair in items)
                        {
                            extra[pair.Key] = pair.Value;
                        }
                    }
                }

                return extra != null;
            }

            /// <summary>
            /// Gets all additional vectors to add. These are additional configurations that were not produced
            /// from the cross product. These are include vectors that did not match any cross product results.
            ///
            /// For example, given the matrix:
            ///   arch: [x64, x86]
            ///   config: [release, debug]
            ///   include:
            ///   - arch: arm64
            ///     config: debug
            ///
            /// This method would return the following:
            ///   - {arch: arm64, config: debug}
            /// </summary>
            public IEnumerable<DictionaryExpressionData> GetUnmatchedVectors()
            {
                for (var i = 0; i < m_filters.Count; i++)
                {
                    if (m_matches[i])
                    {
                        continue;
                    }

                    var filter = m_filters[i];
                    var matrix = new DictionaryExpressionData();
                    foreach (var pair in filter.Filter)
                    {
                        var keyLiteral = pair.Key.AssertString("matrix include item key");
                        matrix.Add(keyLiteral.Value, pair.Value.ToExpressionData());
                    }

                    foreach (var includePair in filter.Extra)
                    {
                        matrix.Add(includePair.Key, includePair.Value);
                    }

                    yield return matrix;
                }
            }

            private readonly List<MatrixIncludeFilter> m_filters = new List<MatrixIncludeFilter>();

            // Tracks whether a filter has been matched
            private readonly Boolean[] m_matches;
        }

        /// <summary>
        /// Represents an item within the sequence "strategy.matrix.include"
        /// </summary>
        private sealed class MatrixIncludeFilter : MatrixFilter
        {
            public MatrixIncludeFilter(
                MappingToken filter,
                DictionaryExpressionData extra)
                : base(filter)
            {
                Filter = filter;
                Extra = extra;
            }

            public Boolean Match(
                DictionaryExpressionData matrix,
                out DictionaryExpressionData extra)
            {
                if (base.Match(matrix))
                {
                    extra = Extra;
                    return true;
                }

                extra = null;
                return false;
            }

            public DictionaryExpressionData Extra { get; }
            public MappingToken Filter { get; }
        }

        /// <summary>
        /// Represents the sequence "strategy.matrix.exclude"
        /// </summary>
        private sealed class MatrixExclude
        {
            public MatrixExclude(
                TemplateContext context,
                DictionaryExpressionData vectors,
                SequenceToken excludeSequence)
            {
                // Convert to excludes sets
                if (excludeSequence?.Count > 0)
                {
                    foreach (var excludeItem in excludeSequence)
                    {
                        var excludeMapping = excludeItem.AssertMapping("matrix excludes item");

                        // Check empty
                        if (excludeMapping.Count == 0)
                        {
                            context.Error(excludeMapping, $"Matrix exclude filter must not be empty");
                            continue;
                        }

                        // Validate first-level keys
                        foreach (var excludePair in excludeMapping)
                        {
                            var excludeKey = excludePair.Key.AssertString("matrix excludes item key");
                            if (!vectors.ContainsKey(excludeKey.Value))
                            {
                                context.Error(excludeKey, $"Matrix exclude key '{excludeKey.Value}' does not match any key within the matrix");
                                continue;
                            }
                        }

                        // Add filter
                        m_filters.Add(new MatrixExcludeFilter(excludeMapping));
                    }
                }
            }

            /// <summary>
            /// Matches a vector from the cross product against each exclude filter.
            ///
            /// For example, given the matrix:
            ///   arch: [x64, x86]
            ///   config: [release, debug]
            ///   exclude:
            ///   - arch: x86
            ///     config: release
            ///
            /// This method would return the following:
            ///   Match( {arch: x64, config: release} ) => false
            ///   Match( {arch: x64, config: debug} ) => false
            ///   Match( {arch: x86, config: release} ) => true
            ///   Match( {arch: x86, config: debug} ) => false
            /// </summary>
            /// <param name="matrix">A vector of the cross product</param>
            /// <returns>True if the vector matched at least one exclude filter</returns>
            public Boolean Match(DictionaryExpressionData matrix)
            {
                foreach (var filter in m_filters)
                {
                    if (filter.Match(matrix))
                    {
                        return true;
                    }
                }

                return false;
            }

            private readonly List<MatrixExcludeFilter> m_filters = new List<MatrixExcludeFilter>();
        }

        /// <summary>
        /// Represents an item within the sequence "strategy.matrix.exclude"
        /// </summary>
        private sealed class MatrixExcludeFilter : MatrixFilter
        {
            public MatrixExcludeFilter(MappingToken filter)
                : base(filter)
            {
            }

            public new Boolean Match(DictionaryExpressionData matrix)
            {
                return base.Match(matrix);
            }
        }

        /// <summary>
        /// Base class for matrix include/exclude filters. That is, an item within the
        /// sequence "strategy.matrix.include" or within the sequence "strategy.matrix.exclude".
        /// </summary>
        private abstract class MatrixFilter
        {
            protected MatrixFilter(MappingToken matrixFilter)
            {
                // Traverse the structure and add an expression to compare each leaf node.
                // For example, given the filter:
                //   versions:
                //     node-version: 12
                //     npm-version: 6
                //   config: release
                // The following filter expressions would be created:
                // - matrix.versions.node-version == 12
                // - matrix.versions.npm-version == 6
                // - matrix.config == 'release'
                var state = new MappingState(null, matrixFilter) as TokenState;
                while (state != null)
                {
                    if (state.MoveNext())
                    {
                        // Leaf
                        if (state.Current is LiteralToken literal)
                        {
                            AddExpression(state.Path, literal);
                        }
                        // Mapping
                        else if (state.Current is MappingToken mapping)
                        {
                            state = new MappingState(state, mapping);
                        }
                        // Sequence
                        else if (state.Current is SequenceToken sequence)
                        {
                            state = new SequenceState(state, sequence);
                        }
                        else
                        {
                            throw new NotSupportedException($"Unexpected token type '{state.Current.Type}' when constructing matrix filter expressions");
                        }
                    }
                    else
                    {
                        state = state.Parent;
                    }
                }
            }

            protected Boolean Match(DictionaryExpressionData matrix)
            {
                if (matrix.Count == 0)
                {
                    throw new InvalidOperationException("Matrix filter cannot be empty");
                }

                foreach (var expression in m_expressions)
                {
                    var result = expression.Evaluate(null, null, matrix, null);
                    if (result.IsFalsy)
                    {
                        return false;
                    }
                }

                return true;
            }

            private void AddExpression(
                String path,
                LiteralToken literal)
            {
                var expressionLiteral = default(String);
                switch (literal.Type)
                {
                    case TokenType.Null:
                        expressionLiteral = ExpressionConstants.Null;
                        break;

                    case TokenType.Boolean:
                        var booleanToken = literal as BooleanToken;
                        expressionLiteral = ExpressionUtility.ConvertToParseToken(booleanToken.Value);
                        break;

                    case TokenType.Number:
                        var numberToken = literal as NumberToken;
                        expressionLiteral = ExpressionUtility.ConvertToParseToken(numberToken.Value);
                        break;

                    case TokenType.String:
                        var stringToken = literal as StringToken;
                        expressionLiteral = ExpressionUtility.ConvertToParseToken(stringToken.Value);
                        break;

                    default:
                        throw new NotSupportedException($"Unexpected literal type '{literal.Type}'");
                }

                var parser = new ExpressionParser();
                var expressionString = $"{path} == {expressionLiteral}";
                var expression = parser.CreateTree(expressionString, null, s_matrixFilterNamedValues, null);
                m_expressions.Add(expression);
            }

            /// <summary>
            /// Used to maintain state while traversing a mapping when building filter expressions.
            /// See <see cref="MatrixFilter"/> for more info.
            /// </summary>
            private sealed class MappingState : TokenState
            {
                public MappingState(
                    TokenState parent,
                    MappingToken mapping)
                    : base(parent)
                {
                    m_mapping = mapping;
                    m_index = -1;
                }

                public override Boolean MoveNext()
                {
                    if (++m_index < m_mapping.Count)
                    {
                        var pair = m_mapping[m_index];
                        var keyLiteral = pair.Key.AssertString("matrix filter key");
                        Current = pair.Value;
                        var parentPath = Parent?.Path ?? WorkflowTemplateConstants.Matrix;
                        Path = $"{parentPath}[{ExpressionUtility.ConvertToParseToken(keyLiteral.Value)}]";
                        return true;
                    }
                    else
                    {
                        Current = null;
                        Path = null;
                        return false;
                    }
                }

                private MappingToken m_mapping;
                private Int32 m_index;
            }

            /// <summary>
            /// Used to maintain state while traversing a sequence when building filter expressions.
            /// See <see cref="MatrixFilter"/> for more info.
            /// </summary>
            private sealed class SequenceState : TokenState
            {
                public SequenceState(
                    TokenState parent,
                    SequenceToken sequence)
                    : base(parent)
                {
                    m_sequence = sequence;
                    m_index = -1;
                }

                public override Boolean MoveNext()
                {
                    if (++m_index < m_sequence.Count)
                    {
                        Current = m_sequence[m_index];
                        var parentPath = Parent?.Path ?? WorkflowTemplateConstants.Matrix;
                        Path = $"{parentPath}[{ExpressionUtility.ConvertToParseToken((Double)m_index)}]";
                        return true;
                    }
                    else
                    {
                        Current = null;
                        Path = null;
                        return false;
                    }
                }

                private SequenceToken m_sequence;
                private Int32 m_index;
            }

            /// <summary>
            /// Used to maintain state while traversing a mapping/sequence when building filter expressions.
            /// See <see cref="MatrixFilter"/> for more info.
            /// </summary>
            private abstract class TokenState
            {
                protected TokenState(TokenState parent)
                {
                    Parent = parent;
                }

                public TemplateToken Current { get; protected set; }
                public TokenState Parent { get; }

                /// <summary>
                /// The expression used to reference the current position within the structure.
                /// For example: matrix.node-version
                /// </summary>
                public String Path { get; protected set; }

                public abstract Boolean MoveNext();
            }

            /// <summary>
            /// Represents the "matrix" context within an include/exclude expression
            /// </summary>
            private sealed class MatrixNamedValue : NamedValue
            {
                protected override Object EvaluateCore(
                    EvaluationContext context,
                    out ResultMemory resultMemory)
                {
                    resultMemory = null;
                    return context.State;
                }
            }

            private static readonly INamedValueInfo[] s_matrixFilterNamedValues = new INamedValueInfo[]
            {
                new NamedValueInfo<MatrixNamedValue>(WorkflowTemplateConstants.Matrix),
            };
            private readonly List<IExpressionNode> m_expressions = new List<IExpressionNode>();
        }

        private readonly TemplateContext m_context;
        private readonly String m_jobName;
        private readonly DictionaryExpressionData m_vectors = new DictionaryExpressionData();
        private SequenceToken m_excludeSequence;
        private SequenceToken m_includeSequence;
    }
}
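The blockSize arithmetic in Build() is a mixed-radix decomposition of productIndex. The standalone sketch below reproduces it for the two-vector matrix from Example 1 so the enumeration order is visible; the vector names and values are illustrative only.

using System;
using System.Collections.Generic;
using System.Linq;

static class CrossProductSketch
{
    static void Main()
    {
        var vectors = new (string Name, string[] Values)[]
        {
            ("arch", new[] { "x64", "x86" }),
            ("os",   new[] { "linux", "windows" }),
        };

        // productSize is the product of the vector lengths; each productIndex is then
        // decomposed with the same blockSize arithmetic used in Build() above.
        var productSize = vectors.Aggregate(1, (acc, v) => acc * v.Values.Length);
        for (var productIndex = 0; productIndex < productSize; productIndex++)
        {
            var blockSize = productSize;
            var row = new List<string>();
            foreach (var (name, values) in vectors)
            {
                blockSize = blockSize / values.Length;
                var vectorIndex = (productIndex / blockSize) % values.Length;
                row.Add($"{name}: {values[vectorIndex]}");
            }
            Console.WriteLine("[" + string.Join(", ", row) + "]");
        }
        // Output:
        // [arch: x64, os: linux]
        // [arch: x64, os: windows]
        // [arch: x86, os: linux]
        // [arch: x86, os: windows]
    }
}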
44 src/Sdk/WorkflowParser/Conversion/PermissionLevelExtensions.cs Normal file
@@ -0,0 +1,44 @@
using System;

namespace GitHub.Actions.WorkflowParser.Conversion
{
    internal static class PermissionLevelExtensions
    {
        public static bool IsLessThanOrEqualTo(
            this PermissionLevel permissionLevel,
            PermissionLevel other)
        {
            switch (permissionLevel, other)
            {
                case (PermissionLevel.NoAccess, PermissionLevel.NoAccess):
                case (PermissionLevel.NoAccess, PermissionLevel.Read):
                case (PermissionLevel.NoAccess, PermissionLevel.Write):
                case (PermissionLevel.Read, PermissionLevel.Read):
                case (PermissionLevel.Read, PermissionLevel.Write):
                case (PermissionLevel.Write, PermissionLevel.Write):
                    return true;
                case (PermissionLevel.Read, PermissionLevel.NoAccess):
                case (PermissionLevel.Write, PermissionLevel.NoAccess):
                case (PermissionLevel.Write, PermissionLevel.Read):
                    return false;
                default:
                    throw new ArgumentException($"Invalid enum comparison: {permissionLevel} and {other}");
            }
        }

        public static string ConvertToString(this PermissionLevel permissionLevel)
        {
            switch (permissionLevel)
            {
                case PermissionLevel.NoAccess:
                    return "none";
                case PermissionLevel.Read:
                    return "read";
                case PermissionLevel.Write:
                    return "write";
                default:
                    throw new NotSupportedException($"invalid permission level found. {permissionLevel}");
            }
        }
    }
}
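The case table above amounts to comparing levels on the ordered scale NoAccess < Read < Write. A minimal sketch, assuming an enum declared in that order (the real PermissionLevel definition is not shown in this diff):

using System;

enum PermissionLevel { NoAccess, Read, Write }

static class PermissionLevelSketch
{
    // Equivalent ordering check when the enum values ascend from NoAccess to Write.
    static bool IsLessThanOrEqualTo(PermissionLevel a, PermissionLevel b) => a <= b;

    static void Main()
    {
        Console.WriteLine(IsLessThanOrEqualTo(PermissionLevel.Read, PermissionLevel.Write));  // True
        Console.WriteLine(IsLessThanOrEqualTo(PermissionLevel.Write, PermissionLevel.Read));  // False
    }
}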
37 src/Sdk/WorkflowParser/Conversion/PermissionLevelViolation.cs Normal file
@@ -0,0 +1,37 @@
namespace GitHub.Actions.WorkflowParser.Conversion
{
    internal sealed class PermissionLevelViolation
    {
        public PermissionLevelViolation(string permissionName, PermissionLevel requestedPermissions, PermissionLevel allowedPermissions)
        {
            PermissionName = permissionName;
            RequestedPermissionLevel = requestedPermissions;
            AllowedPermissionLevel = allowedPermissions;
        }

        public string PermissionName
        {
            get;
        }

        public PermissionLevel RequestedPermissionLevel
        {
            get;
        }

        public PermissionLevel AllowedPermissionLevel
        {
            get;
        }

        public string RequestedPermissionLevelString()
        {
            return $"{PermissionName}: {RequestedPermissionLevel.ConvertToString()}";
        }

        public string AllowedPermissionLevelString()
        {
            return $"{PermissionName}: {AllowedPermissionLevel.ConvertToString()}";
        }
    }
}
79 src/Sdk/WorkflowParser/Conversion/PermissionsHelper.cs Normal file
@@ -0,0 +1,79 @@
#nullable enable

using System;
using System.Linq;
using GitHub.Actions.WorkflowParser.ObjectTemplating;

namespace GitHub.Actions.WorkflowParser.Conversion
{
    internal static class PermissionsHelper
    {
        /// <summary>
        /// Validates that permissions requested in a reusable workflow do not exceed the allowed permissions
        /// </summary>
        /// <param name="context">The template context</param>
        /// <param name="workflowJob">The reusable workflow job</param>
        /// <param name="embeddedJob">(Optional) Used when formatting errors related to an embedded job within the reusable workflow</param>
        /// <param name="requested">The permissions within the reusable workflow file. These may be defined either at the root of the file, or may be defined on a job within the file.</param>
        /// <param name="explicitMax">(Optional) The max permissions explicitly allowed by the caller</param>
        /// <param name="permissionsPolicy">The default permissions policy</param>
        /// <param name="isTrusted">Indicates whether the reusable workflow exists within the same trust boundary (e.g. enterprise/organization) as the root workflow</param>
        internal static void ValidateEmbeddedPermissions(
            TemplateContext context,
            ReusableWorkflowJob workflowJob,
            IJob? embeddedJob,
            Permissions requested,
            Permissions? explicitMax,
            string permissionsPolicy,
            bool isTrusted)
        {
            if (requested == null)
            {
                return;
            }

            var effectiveMax = explicitMax ?? CreatePermissionsFromPolicy(context, permissionsPolicy, includeIdToken: isTrusted, includeModels: context.GetFeatures().AllowModelsPermission);

            if (requested.ViolatesMaxPermissions(effectiveMax, out var permissionLevelViolations))
            {
                var requestedStr = string.Join(", ", permissionLevelViolations.Select(x => x.RequestedPermissionLevelString()));
                var allowedStr = string.Join(", ", permissionLevelViolations.Select(x => x.AllowedPermissionLevelString()));
                if (embeddedJob != null)
                {
                    context.Error(workflowJob.Id, $"Error calling workflow '{workflowJob.Ref}'. The nested job '{embeddedJob.Id!.Value}' is requesting '{requestedStr}', but is only allowed '{allowedStr}'.");
                }
                else
                {
                    context.Error(workflowJob.Id, $"Error calling workflow '{workflowJob.Ref}'. The workflow is requesting '{requestedStr}', but is only allowed '{allowedStr}'.");
                }
            }
        }

        /// <summary>
        /// Creates permissions based on policy
        /// </summary>
        /// <param name="context">The template context</param>
        /// <param name="permissionsPolicy">The permissions policy</param>
        /// <param name="includeIdToken">Indicates whether the permissions should include an ID token</param>
        private static Permissions CreatePermissionsFromPolicy(
            TemplateContext context,
            string permissionsPolicy,
            bool includeIdToken,
            bool includeModels)
        {
            switch (permissionsPolicy)
            {
                case WorkflowConstants.PermissionsPolicy.LimitedRead:
                    return new Permissions(PermissionLevel.NoAccess, includeIdToken: false, includeAttestations: false, includeModels: false)
                    {
                        Contents = PermissionLevel.Read,
                        Packages = PermissionLevel.Read,
                    };
                case WorkflowConstants.PermissionsPolicy.Write:
                    return new Permissions(PermissionLevel.Write, includeIdToken: includeIdToken, includeAttestations: true, includeModels: includeModels);
                default:
                    throw new ArgumentException($"Unexpected permission policy: '{permissionsPolicy}'");
            }
        }
    }
}
272 src/Sdk/WorkflowParser/Conversion/ReusableWorkflowsLoader.cs Normal file
@@ -0,0 +1,272 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references

using System;
using System.Collections.Generic;
using System.Linq;
using GitHub.Actions.Expressions;
using GitHub.Actions.Expressions.Sdk;
using GitHub.Actions.WorkflowParser.Conversion;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;

namespace GitHub.Actions.WorkflowParser.Conversion
{
    using GitHub.Actions.WorkflowParser.ObjectTemplating;

    /// <summary>
    /// Loads reusable workflows
    /// </summary>
    internal sealed class ReusableWorkflowsLoader
    {
        private ReusableWorkflowsLoader(
            IServerTraceWriter serverTrace,
            ITraceWriter trace,
            ParseOptions options,
            WorkflowUsage usage,
            TemplateContext context,
            WorkflowTemplate workflowTemplate,
            YamlTemplateLoader loader,
            String permissionPolicy,
            IDictionary<string, ReferencedWorkflow> referencedWorkflows)
        {
            m_serverTrace = serverTrace ?? new EmptyServerTraceWriter();
            m_trace = trace ?? new EmptyTraceWriter();
            m_parseOptions = new ParseOptions(options ?? throw new ArgumentNullException(nameof(options)));
            m_usage = usage ?? throw new ArgumentNullException(nameof(usage));
            m_context = context ?? throw new ArgumentNullException(nameof(context));
            m_workflowTemplate = workflowTemplate ?? throw new ArgumentNullException(nameof(workflowTemplate));
            m_loader = loader ?? throw new ArgumentNullException(nameof(loader));
            m_permissionPolicy = permissionPolicy ?? throw new ArgumentNullException(nameof(permissionPolicy));
            m_referencedWorkflows = referencedWorkflows ?? throw new ArgumentNullException(nameof(referencedWorkflows));
        }

        /// <summary>
        /// Loads reusable workflows if not in an error state.
        ///
        /// Any new errors are recorded to both <c ref="TemplateContext.Errors" /> and <c ref="WorkflowTemplate.Errors" />.
        /// </summary>
        public static void Load(
            IServerTraceWriter serverTrace,
            ITraceWriter trace,
            ParseOptions options,
            WorkflowUsage usage,
            TemplateContext context,
            WorkflowTemplate workflowTemplate,
            YamlTemplateLoader loader,
            String permissionPolicy,
            IDictionary<string, ReferencedWorkflow> referencedWorkflows)
        {
            new ReusableWorkflowsLoader(serverTrace, trace, options, usage, context, workflowTemplate, loader, permissionPolicy, referencedWorkflows)
                .Load();
        }

        /// <summary>
        /// Refer overload
        /// </summary>
        private void Load()
        {
            // Skip reusable workflows?
            if (m_parseOptions.SkipReusableWorkflows)
            {
                return;
            }

            // Check errors
            if (m_context.Errors.Count > 0)
            {
                return;
            }

            // Note, the "finally" block appends context.Errors to workflowTemplate
            var hasReusableWorkflowJob = false;
            try
            {
                foreach (var job in m_workflowTemplate.Jobs)
                {
                    // Load reusable workflow
                    if (job is ReusableWorkflowJob workflowJob)
                    {
                        hasReusableWorkflowJob = true;
                        LoadRecursive(workflowJob);

                        // Check errors
                        if (m_context.Errors.Count > 0)
                        {
                            return;
                        }
                    }
                }
            }
            catch (ReferencedWorkflowNotFoundException)
            {
                // Long term, catch TemplateUserException and let others bubble
                throw;
            }
            catch (Exception ex)
            {
                m_context.Errors.Add(ex);
            }
            finally
            {
                // Append context.Errors to workflowTemplate
                if (m_context.Errors.Count > 0)
                {
                    foreach (var error in m_context.Errors)
                    {
                        m_workflowTemplate.Errors.Add(new WorkflowValidationError(error.Code, error.Message));
                    }
                }

                // Update WorkflowTemplate.FileTable with referenced workflows
                if (hasReusableWorkflowJob)
                {
                    m_workflowTemplate.FileTable.Clear();
                    m_workflowTemplate.FileTable.AddRange(m_context.GetFileTable());
                }
            }
        }

        /// <summary>
        /// Loads the referenced workflow by parsing the workflow file and converting it to the workflow template WorkflowJob.
        /// </summary>
        private void LoadRecursive(
            ReusableWorkflowJob workflowJob,
            int depth = 1)
        {
            // Check depth
            if (depth > m_parseOptions.MaxNestedReusableWorkflowsDepth)
            {
                throw new Exception($"Nested reusable workflow depth exceeded {m_parseOptions.MaxNestedReusableWorkflowsDepth}.");
            }

            TemplateToken tokens;

            // Load the reusable workflow
            try
            {
                // Fully qualify workflow ref
                workflowJob.Ref = FullyQualifyWorkflowRef(m_context, workflowJob.Ref, m_referencedWorkflows);
                var isTrusted = IsReferencedWorkflowTrusted(workflowJob.Ref.Value);

                // Parse template tokens
                tokens = m_loader.ParseWorkflow(m_context, workflowJob.Ref.Value);

                // Gather telemetry
                m_usage.Gather(m_context, tokens);

                // Check errors
                if (m_context.Errors.Count > 0)
                {
                    // Short-circuit
                    return;
                }

                // Convert to workflow types
                WorkflowTemplateConverter.ConvertToReferencedWorkflow(m_context, tokens, workflowJob, m_permissionPolicy, isTrusted);

                // Check errors
                if (m_context.Errors.Count > 0)
                {
                    // Short-circuit
                    return;
                }
            }
            finally
            {
                // Prefix errors with caller file/line/col
                PrefixErrorsWithCallerInfo(workflowJob);
            }

            // Load nested reusable workflows
            foreach (var nestedJob in workflowJob.Jobs)
            {
                if (nestedJob is ReusableWorkflowJob nestedWorkflowJob)
                {
                    // Recurse
                    LoadRecursive(nestedWorkflowJob, depth + 1);

                    // Check errors
                    if (m_context.Errors.Count > 0)
                    {
                        return;
                    }
                }
            }
        }

        /// <summary>
        /// For the given token and referencedWorkflows, resolves the workflow reference (i.e. the token value).
        /// This ensures that the workflow reference is in the fully qualified form (nwo+path+version) even when calling local workflows without nwo or version.
        /// </summary>
        internal static StringToken FullyQualifyWorkflowRef(
            TemplateContext context,
            StringToken workflowJobRef,
            IDictionary<string, ReferencedWorkflow> referencedWorkflows)
        {
            if (!workflowJobRef.Value.StartsWith(WorkflowTemplateConstants.LocalPrefix))
            {
                return workflowJobRef;
            }

            var callerPath = context.GetFileName(workflowJobRef.FileId.Value);
            if (!referencedWorkflows.TryGetValue(callerPath, out ReferencedWorkflow callerWorkflow) || callerWorkflow == null)
            {
                throw new ReferencedWorkflowNotFoundException($"Cannot find the caller workflow from the referenced workflows: '{callerPath}'");
            }

            var filePath = workflowJobRef.Value.Substring(WorkflowTemplateConstants.LocalPrefix.Length);
            var path = $"{callerWorkflow.Repository}/{filePath}@{callerWorkflow.ResolvedSha}";

            return new StringToken(workflowJobRef.FileId, workflowJobRef.Line, workflowJobRef.Column, path);
        }

        /// <summary>
        /// Prefixes all error messages with the caller file/line/column.
        /// </summary>
        private void PrefixErrorsWithCallerInfo(ReusableWorkflowJob workflowJob)
        {
            if (m_context.Errors.Count == 0)
            {
                return;
            }

            var callerFile = m_context.GetFileName(workflowJob.Ref.FileId.Value);
            for (int i = 0; i < m_context.Errors.Count; i++)
            {
                var errorMessage = m_context.Errors.GetMessage(i);
                if (String.IsNullOrEmpty(errorMessage) || !errorMessage.StartsWith(callerFile))
                {
                    // When there is no caller file in the error message, add it for annotation
                    m_context.Errors.PrefixMessage(
                        i,
                        TemplateStrings.CalledWorkflowNotValidWithErrors(
                            callerFile,
                            TemplateStrings.LineColumn(workflowJob.Ref.Line, workflowJob.Ref.Column)));
                }
            }
        }

        /// <summary>
        /// Checks whether the given workflowJobRefValue is trusted.
        /// </summary>
        private bool IsReferencedWorkflowTrusted(String workflowJobRefValue)
        {
            if (m_referencedWorkflows.TryGetValue(workflowJobRefValue, out ReferencedWorkflow referencedWorkflow) &&
                referencedWorkflow != null)
            {
                return referencedWorkflow.IsTrusted();
            }

            return false;
        }

        private readonly TemplateContext m_context;
        private readonly YamlTemplateLoader m_loader;
        private readonly ParseOptions m_parseOptions;
        private readonly string m_permissionPolicy;
        private readonly IDictionary<string, ReferencedWorkflow> m_referencedWorkflows;
        private readonly IServerTraceWriter m_serverTrace;
        private readonly ITraceWriter m_trace;
        private readonly WorkflowUsage m_usage;
        private readonly WorkflowTemplate m_workflowTemplate;
    }
}
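FullyQualifyWorkflowRef only rewrites local references. A minimal sketch of the rewrite, assuming the local prefix is "./" and using made-up repository and SHA values for illustration:

using System;

static class WorkflowRefSketch
{
    // Rewrite a caller-relative workflow reference into the fully qualified
    // "owner/repo/path@sha" form used for nested reusable workflows.
    static string Qualify(string jobRef, string callerRepository, string callerResolvedSha)
    {
        const string localPrefix = "./";
        if (!jobRef.StartsWith(localPrefix))
        {
            return jobRef; // already fully qualified, e.g. "owner/repo/path@ref"
        }

        var filePath = jobRef.Substring(localPrefix.Length);
        return $"{callerRepository}/{filePath}@{callerResolvedSha}";
    }

    static void Main()
    {
        Console.WriteLine(Qualify("./.github/workflows/build.yml", "octo-org/octo-repo", "6c2c7f1"));
        // octo-org/octo-repo/.github/workflows/build.yml@6c2c7f1
    }
}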
76 src/Sdk/WorkflowParser/Conversion/TemplateTokenExtensions.cs Normal file
@@ -0,0 +1,76 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using GitHub.Actions.Expressions.Data;
|
||||
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
|
||||
|
||||
namespace GitHub.Actions.WorkflowParser.Conversion
|
||||
{
|
||||
internal static class TemplateTokenExtensions
|
||||
{
|
||||
public static ArrayExpressionData ToExpressionData(this SequenceToken sequence)
|
||||
{
|
||||
var token = sequence as TemplateToken;
|
||||
var expressionData = token.ToExpressionData();
|
||||
return expressionData.AssertArray("converted sequence token");
|
||||
}
|
||||
|
||||
public static DictionaryExpressionData ToExpressionData(this MappingToken mapping)
|
||||
{
|
||||
var token = mapping as TemplateToken;
|
||||
var expressionData = token.ToExpressionData();
|
||||
return expressionData.AssertDictionary("converted mapping token");
|
||||
}
|
||||
|
||||
public static ExpressionData ToExpressionData(this TemplateToken token)
|
||||
{
|
||||
switch (token.Type)
|
||||
{
|
||||
case TokenType.Mapping:
|
||||
var mapping = token as MappingToken;
|
||||
var dictionary = new DictionaryExpressionData();
|
||||
if (mapping.Count > 0)
|
||||
{
|
||||
foreach (var pair in mapping)
|
||||
{
|
||||
var keyLiteral = pair.Key.AssertString("dictionary context data key");
|
||||
var key = keyLiteral.Value;
|
||||
var value = pair.Value.ToExpressionData();
|
||||
dictionary.Add(key, value);
|
||||
}
|
||||
}
|
||||
return dictionary;
|
||||
|
||||
case TokenType.Sequence:
|
||||
var sequence = token as SequenceToken;
|
||||
var array = new ArrayExpressionData();
|
||||
if (sequence.Count > 0)
|
||||
{
|
||||
foreach (var item in sequence)
|
||||
{
|
||||
array.Add(item.ToExpressionData());
|
||||
}
|
||||
}
|
||||
return array;
|
||||
|
||||
case TokenType.Null:
|
||||
return null;
|
||||
|
||||
case TokenType.Boolean:
|
||||
var boolean = token as BooleanToken;
|
||||
return new BooleanExpressionData(boolean.Value);
|
||||
|
||||
case TokenType.Number:
|
||||
var number = token as NumberToken;
|
||||
return new NumberExpressionData(number.Value);
|
||||
|
||||
case TokenType.String:
|
||||
var stringToken = token as StringToken;
|
||||
return new StringExpressionData(stringToken.Value);
|
||||
|
||||
default:
|
||||
throw new NotSupportedException($"Unexpected {nameof(TemplateToken)} type '{token.Type}'");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
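A minimal usage sketch for the extension methods above (illustrative only; assumes the MappingToken.Add(ScalarToken, TemplateToken) overload from the ObjectTemplating token types):

var mapping = new MappingToken(null, null, null);
mapping.Add(
    new StringToken(null, null, null, "enabled"),
    new BooleanToken(null, null, null, true));
DictionaryExpressionData data = mapping.ToExpressionData();
// data now holds a single entry: "enabled" => BooleanExpressionData(true)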
|
||||
63
src/Sdk/WorkflowParser/Conversion/WorkflowSchemaFactory.cs
Normal file
@@ -0,0 +1,63 @@
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Reflection;
|
||||
using GitHub.Actions.WorkflowParser.ObjectTemplating.Schema;
|
||||
|
||||
namespace GitHub.Actions.WorkflowParser.Conversion
|
||||
{
|
||||
/// <summary>
|
||||
/// Loads the schema for workflows
|
||||
/// </summary>
|
||||
internal static class WorkflowSchemaFactory
|
||||
{
|
||||
/// <summary>
|
||||
/// Loads the template schema for the specified features.
|
||||
/// </summary>
|
||||
internal static TemplateSchema GetSchema(WorkflowFeatures features)
|
||||
{
|
||||
if (features == null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(features));
|
||||
}
|
||||
|
||||
// Find resource names corresponding to enabled features
|
||||
var resourceNames = WorkflowFeatures.Names
|
||||
.Where(x => features.GetFeature(x)) // Enabled features only
|
||||
.Select(x => string.Concat(c_resourcePrefix, "-", x, c_resourceSuffix)) // To resource name
|
||||
.Where(x => s_resourceNames.Contains(x)) // Resource must exist
|
||||
.ToList();
|
||||
|
||||
// More than one resource found?
|
||||
if (resourceNames.Count > 1)
|
||||
{
|
||||
throw new NotSupportedException("Failed to load workflow schema. Only one feature flag with schema changes can be enabled at a time.");
|
||||
}
|
||||
|
||||
var resourceName = resourceNames.FirstOrDefault() ?? c_defaultResourceName;
|
||||
return s_schemas.GetOrAdd(
|
||||
resourceName,
|
||||
(resourceName) =>
|
||||
{
|
||||
var assembly = Assembly.GetExecutingAssembly();
|
||||
var json = default(String);
|
||||
using (var stream = assembly.GetManifestResourceStream(resourceName)!)
|
||||
using (var streamReader = new StreamReader(stream))
|
||||
{
|
||||
json = streamReader.ReadToEnd();
|
||||
}
|
||||
|
||||
var objectReader = new JsonObjectReader(null, json);
|
||||
return TemplateSchema.Load(objectReader);
|
||||
});
|
||||
}
|
||||
|
||||
private const string c_resourcePrefix = "GitHub.Actions.WorkflowParser.workflow-v1.0";
|
||||
private const string c_resourceSuffix = ".json";
|
||||
private const string c_defaultResourceName = c_resourcePrefix + c_resourceSuffix;
|
||||
private static readonly HashSet<string> s_resourceNames = Assembly.GetExecutingAssembly().GetManifestResourceNames().ToHashSet(StringComparer.Ordinal);
|
||||
private static readonly ConcurrentDictionary<string, TemplateSchema> s_schemas = new(StringComparer.Ordinal);
|
||||
}
|
||||
}
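An illustrative note on the resource naming above, using a hypothetical feature name:

// An enabled feature "my-feature" resolves to the embedded resource
// "GitHub.Actions.WorkflowParser.workflow-v1.0-my-feature.json" (prefix + "-" + feature + ".json").
// When no enabled feature has a matching resource, GetSchema falls back to the default
// "GitHub.Actions.WorkflowParser.workflow-v1.0.json"; each loaded schema is cached in s_schemas.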
|
||||
121
src/Sdk/WorkflowParser/Conversion/WorkflowTemplateConstants.cs
Normal file
@@ -0,0 +1,121 @@
|
||||
using System;
|
||||
|
||||
namespace GitHub.Actions.WorkflowParser.Conversion
|
||||
{
|
||||
internal static class WorkflowTemplateConstants
|
||||
{
|
||||
public const String Always = "always";
|
||||
public const String BooleanNeedsContext = "boolean-needs-context";
|
||||
public const String BooleanStepsContext = "boolean-steps-context";
|
||||
public const String BooleanStrategyContext = "boolean-strategy-context";
|
||||
public const String CancelInProgress = "cancel-in-progress";
|
||||
public const String CancelTimeoutMinutes = "cancel-timeout-minutes";
|
||||
public const String Cancelled = "cancelled";
|
||||
public const String Concurrency = "concurrency";
|
||||
public const String Container = "container";
|
||||
public const String ContinueOnError = "continue-on-error";
|
||||
public const String Credentials = "credentials";
|
||||
public const String Default = "default";
|
||||
public const String Defaults = "defaults";
|
||||
public const String Description = "description";
|
||||
public const String DockerUriPrefix = "docker://";
|
||||
public const String EmbeddedConcurrency = "embedded-concurrency";
|
||||
public const String Env = "env";
|
||||
public const String Ent = "ent";
|
||||
public const String Enterprise = "enterprise";
|
||||
public const String Environment = "environment";
|
||||
public const String Event = "event";
|
||||
public const String EventName = "event_name";
|
||||
public const String EventPattern = "github.event";
|
||||
public const String Exclude = "exclude";
|
||||
public const String FailFast = "fail-fast";
|
||||
public const String Failure = "failure";
|
||||
public const String GitHub = "github";
|
||||
public const String Group = "group";
|
||||
public const String HashFiles = "hashFiles";
|
||||
public const String Id = "id";
|
||||
public const String If = "if";
|
||||
public const String Image = "image";
|
||||
public const String ImageName = "image-name";
|
||||
public const String CustomImageVersion = "version";
|
||||
public const String Include = "include";
|
||||
public const String Inherit = "inherit";
|
||||
public const String Inputs = "inputs";
|
||||
public const String InputsPattern = "inputs.*";
|
||||
public const String Job = "job";
|
||||
public const String JobConcurrency = "job-concurrency";
|
||||
public const String JobDefaultsRun = "job-defaults-run";
|
||||
public const String JobEnvironment = "job-environment";
|
||||
public const String JobIfResult = "job-if-result";
|
||||
public const String JobOutputs = "job-outputs";
|
||||
public const String Jobs = "jobs";
|
||||
public const String JobsPattern = "jobs.*";
|
||||
public const String JobsOutputsPattern = "jobs.*.outputs";
|
||||
public const String Labels = "labels";
|
||||
public const String LocalPrefix = "./";
|
||||
public const String Matrix = "matrix";
|
||||
public const String MaxParallel = "max-parallel";
|
||||
public const String Name = "name";
|
||||
public const String Needs = "needs";
|
||||
public const String NumberNeedsContext = "number-needs-context";
|
||||
public const String NumberStepsContext = "number-steps-context";
|
||||
public const String NumberStrategyContext = "number-strategy-context";
|
||||
public const String On = "on";
|
||||
public const String Options = "options";
|
||||
public const String Org = "org";
|
||||
public const String Organization = "organization";
|
||||
public const String Outputs = "outputs";
|
||||
public const String OutputsPattern = "needs.*.outputs";
|
||||
public const String Password = "password";
|
||||
public const String Permissions = "permissions";
|
||||
public const String Pool = "pool";
|
||||
public const String Ports = "ports";
|
||||
public const String Required = "required";
|
||||
public const String Result = "result";
|
||||
public const String Run = "run";
|
||||
public const String RunName = "run-name";
|
||||
public const String Runner = "runner";
|
||||
public const String RunsOn = "runs-on";
|
||||
public const String Secret = "secret";
|
||||
public const String Secrets = "secrets";
|
||||
public const String Services = "services";
|
||||
public const String Shell = "shell";
|
||||
public const String Skipped = "skipped";
|
||||
public const String Slash = "/";
|
||||
public const String Snapshot = "snapshot";
|
||||
public const String StepEnv = "step-env";
|
||||
public const String StepIfResult = "step-if-result";
|
||||
public const String StepWith = "step-with";
|
||||
public const String Steps = "steps";
|
||||
public const String Strategy = "strategy";
|
||||
public const String StringNeedsContext = "string-needs-context";
|
||||
public const String StringRunnerContextNoSecrets = "string-runner-context-no-secrets";
|
||||
public const String StringStepsContext = "string-steps-context";
|
||||
public const String StringStrategyContext = "string-strategy-context";
|
||||
public const String Success = "success";
|
||||
public const String TimeoutMinutes = "timeout-minutes";
|
||||
public const String Type = "type";
|
||||
public const String TypeString = "string";
|
||||
public const String TypeBoolean = "boolean";
|
||||
public const String TypeNumber = "number";
|
||||
public const String Url = "url";
|
||||
public const String Username = "username";
|
||||
public const String Uses = "uses";
|
||||
public const String Vars = "vars";
|
||||
public const String VarsPattern = "vars.*";
|
||||
public const String VmImage = "vmImage";
|
||||
public const String Volumes = "volumes";
|
||||
public const String With = "with";
|
||||
public const String Workflow = "workflow";
|
||||
public const String Workflow_1_0 = "workflow-v1.0";
|
||||
public const String WorkflowCall = "workflow_call";
|
||||
public const String WorkflowCallInputs = "workflow-call-inputs";
|
||||
public const String WorkflowCallOutputs = "workflow-call-outputs";
|
||||
public const String WorkflowConcurrency = "workflow-concurrency";
|
||||
public const String WorkflowDispatch = "workflow_dispatch";
|
||||
public const String WorkflowJobSecrets = "workflow-job-secrets";
|
||||
public const String WorkflowJobWith = "workflow-job-with";
|
||||
public const String WorkflowRoot = "workflow-root";
|
||||
public const String WorkingDirectory = "working-directory";
|
||||
}
|
||||
}
|
||||
2242
src/Sdk/WorkflowParser/Conversion/WorkflowTemplateConverter.cs
Normal file
File diff suppressed because it is too large
805
src/Sdk/WorkflowParser/Conversion/YamlObjectReader.cs
Normal file
@@ -0,0 +1,805 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using GitHub.Actions.WorkflowParser.ObjectTemplating;
|
||||
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
|
||||
using YamlDotNet.Core;
|
||||
using YamlDotNet.Core.Events;
|
||||
|
||||
namespace GitHub.Actions.WorkflowParser.Conversion
|
||||
{
|
||||
/// <summary>
|
||||
/// Converts a YAML file into a TemplateToken
|
||||
/// </summary>
|
||||
internal sealed class YamlObjectReader : IObjectReader
|
||||
{
|
||||
internal YamlObjectReader(
|
||||
Int32? fileId,
|
||||
TextReader input,
|
||||
Boolean allowAnchors = false,
|
||||
Telemetry telemetry = null)
|
||||
{
|
||||
m_fileId = fileId;
|
||||
m_parser = new Parser(input);
|
||||
m_allowAnchors = allowAnchors;
|
||||
m_telemetry = telemetry ?? new Telemetry();
|
||||
m_events = new List<ParsingEvent>();
|
||||
m_anchors = new Dictionary<String, Int32>();
|
||||
m_replay = new Stack<YamlReplayState>();
|
||||
}
|
||||
|
||||
public Boolean AllowLiteral(out LiteralToken value)
|
||||
{
|
||||
if (EvaluateCurrent() is Scalar scalar)
|
||||
{
|
||||
// Tag specified
|
||||
if (!String.IsNullOrEmpty(scalar.Tag))
|
||||
{
|
||||
// String tag
|
||||
if (String.Equals(scalar.Tag, c_stringTag, StringComparison.Ordinal))
|
||||
{
|
||||
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
// Not plain style
|
||||
if (scalar.Style != ScalarStyle.Plain)
|
||||
{
|
||||
throw new NotSupportedException($"The scalar style '{scalar.Style}' on line {scalar.Start.Line} and column {scalar.Start.Column} is not valid with the tag '{scalar.Tag}'");
|
||||
}
|
||||
|
||||
// Boolean, Float, Integer, or Null
|
||||
switch (scalar.Tag)
|
||||
{
|
||||
case c_booleanTag:
|
||||
value = ParseBoolean(scalar);
|
||||
break;
|
||||
case c_floatTag:
|
||||
value = ParseFloat(scalar);
|
||||
break;
|
||||
case c_integerTag:
|
||||
value = ParseInteger(scalar);
|
||||
break;
|
||||
case c_nullTag:
|
||||
value = ParseNull(scalar);
|
||||
break;
|
||||
default:
|
||||
throw new NotSupportedException($"Unexpected tag '{scalar.Tag}'");
|
||||
}
|
||||
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
// Plain style, determine type using YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
|
||||
if (scalar.Style == ScalarStyle.Plain)
|
||||
{
|
||||
if (MatchNull(scalar, out var nullToken))
|
||||
{
|
||||
value = nullToken;
|
||||
}
|
||||
else if (MatchBoolean(scalar, out var booleanToken))
|
||||
{
|
||||
value = booleanToken;
|
||||
}
|
||||
else if (MatchInteger(scalar, out var numberToken) ||
|
||||
MatchFloat(scalar, out numberToken))
|
||||
{
|
||||
value = numberToken;
|
||||
}
|
||||
else
|
||||
{
|
||||
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
|
||||
}
|
||||
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
// Otherwise assume string
|
||||
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
public Boolean AllowSequenceStart(out SequenceToken value)
|
||||
{
|
||||
if (EvaluateCurrent() is SequenceStart sequenceStart)
|
||||
{
|
||||
value = new SequenceToken(m_fileId, sequenceStart.Start.Line, sequenceStart.Start.Column);
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
public Boolean AllowSequenceEnd()
|
||||
{
|
||||
if (EvaluateCurrent() is SequenceEnd)
|
||||
{
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
public Boolean AllowMappingStart(out MappingToken value)
|
||||
{
|
||||
if (EvaluateCurrent() is MappingStart mappingStart)
|
||||
{
|
||||
value = new MappingToken(m_fileId, mappingStart.Start.Line, mappingStart.Start.Column);
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
public Boolean AllowMappingEnd()
|
||||
{
|
||||
if (EvaluateCurrent() is MappingEnd)
|
||||
{
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Consumes the last parsing events, which are expected to be DocumentEnd and StreamEnd.
|
||||
/// </summary>
|
||||
public void ValidateEnd()
|
||||
{
|
||||
if (EvaluateCurrent() is DocumentEnd)
|
||||
{
|
||||
MoveNext();
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new InvalidOperationException("Expected document end parse event");
|
||||
}
|
||||
|
||||
if (EvaluateCurrent() is StreamEnd)
|
||||
{
|
||||
MoveNext();
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new InvalidOperationException("Expected stream end parse event");
|
||||
}
|
||||
|
||||
if (MoveNext())
|
||||
{
|
||||
throw new InvalidOperationException("Expected end of parse events");
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Consumes the first parsing events, which are expected to be StreamStart and DocumentStart.
|
||||
/// </summary>
|
||||
public void ValidateStart()
|
||||
{
|
||||
if (EvaluateCurrent() != null)
|
||||
{
|
||||
throw new InvalidOperationException("Unexpected parser state");
|
||||
}
|
||||
|
||||
if (!MoveNext())
|
||||
{
|
||||
throw new InvalidOperationException("Expected a parse event");
|
||||
}
|
||||
|
||||
if (EvaluateCurrent() is StreamStart)
|
||||
{
|
||||
MoveNext();
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new InvalidOperationException("Expected stream start parse event");
|
||||
}
|
||||
|
||||
if (EvaluateCurrent() is DocumentStart)
|
||||
{
|
||||
MoveNext();
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new InvalidOperationException("Expected document start parse event");
|
||||
}
|
||||
}
|
||||
|
||||
private ParsingEvent EvaluateCurrent_Legacy()
|
||||
{
|
||||
if (m_current == null)
|
||||
{
|
||||
m_current = m_parser.Current;
|
||||
if (m_current != null)
|
||||
{
|
||||
if (m_current is Scalar scalar)
|
||||
{
|
||||
// Verify not using anchors
|
||||
if (scalar.Anchor != null)
|
||||
{
|
||||
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{scalar.Anchor}'");
|
||||
}
|
||||
}
|
||||
else if (m_current is MappingStart mappingStart)
|
||||
{
|
||||
// Verify not using anchors
|
||||
if (mappingStart.Anchor != null)
|
||||
{
|
||||
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{mappingStart.Anchor}'");
|
||||
}
|
||||
}
|
||||
else if (m_current is SequenceStart sequenceStart)
|
||||
{
|
||||
// Verify not using anchors
|
||||
if (sequenceStart.Anchor != null)
|
||||
{
|
||||
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{sequenceStart.Anchor}'");
|
||||
}
|
||||
}
|
||||
else if (!(m_current is MappingEnd) &&
|
||||
!(m_current is SequenceEnd) &&
|
||||
!(m_current is DocumentStart) &&
|
||||
!(m_current is DocumentEnd) &&
|
||||
!(m_current is StreamStart) &&
|
||||
!(m_current is StreamEnd))
|
||||
{
|
||||
throw new InvalidOperationException($"Unexpected parsing event type: {m_current.GetType().Name}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return m_current;
|
||||
}
|
||||
|
||||
private ParsingEvent EvaluateCurrent()
|
||||
{
|
||||
if (!m_allowAnchors)
|
||||
{
|
||||
return EvaluateCurrent_Legacy();
|
||||
}
|
||||
|
||||
return m_current;
|
||||
}
|
||||
|
||||
private Boolean MoveNext_Legacy()
|
||||
{
|
||||
m_current = null;
|
||||
return m_parser.MoveNext();
|
||||
}
|
||||
|
||||
private Boolean MoveNext()
|
||||
{
|
||||
if (!m_allowAnchors)
|
||||
{
|
||||
return MoveNext_Legacy();
|
||||
}
|
||||
|
||||
// Replaying an anchor?
|
||||
// Adjust depth.
|
||||
// Pop if done.
|
||||
if (m_replay.Count > 0)
|
||||
{
|
||||
var replay = m_replay.Peek();
|
||||
|
||||
if (m_current is Scalar)
|
||||
{
|
||||
// Done?
|
||||
if (replay.Depth == 0)
|
||||
{
|
||||
// Pop
|
||||
m_replay.Pop();
|
||||
}
|
||||
}
|
||||
else if (m_current is SequenceStart || m_current is MappingStart)
|
||||
{
|
||||
// Increment depth
|
||||
replay.Depth++;
|
||||
}
|
||||
else if (m_current is SequenceEnd || m_current is MappingEnd)
|
||||
{
|
||||
// Decrement depth
|
||||
replay.Depth--;
|
||||
|
||||
// Done?
|
||||
if (replay.Depth == 0)
|
||||
{
|
||||
// Pop
|
||||
m_replay.Pop();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Still replaying?
|
||||
if (m_replay.Count > 0)
|
||||
{
|
||||
var replay = m_replay.Peek();
|
||||
|
||||
// Move next
|
||||
replay.Index++;
|
||||
|
||||
// Store current
|
||||
m_current = m_events[replay.Index];
|
||||
}
|
||||
// Not replaying
|
||||
else
|
||||
{
|
||||
// Move next
|
||||
if (!m_parser.MoveNext())
|
||||
{
|
||||
// Clear current
|
||||
m_current = null;
|
||||
|
||||
// Short-circuit
|
||||
return false;
|
||||
}
|
||||
|
||||
// Store current
|
||||
m_current = m_parser.Current;
|
||||
|
||||
// Store event
|
||||
m_events.Add(m_current);
|
||||
|
||||
// Anchor?
|
||||
var anchor = (m_current as NodeEvent)?.Anchor;
|
||||
if (anchor != null)
|
||||
{
|
||||
// Not allowed?
|
||||
if (!m_allowAnchors)
|
||||
{
|
||||
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{anchor}'");
|
||||
}
|
||||
|
||||
// Validate node type
|
||||
if (m_current is not Scalar && m_current is not MappingStart && m_current is not SequenceStart)
|
||||
{
|
||||
throw new InvalidOperationException($"Unexpected node type with anchor '{anchor}': {m_current.GetType().Name}");
|
||||
}
|
||||
|
||||
// Store anchor index
|
||||
m_anchors[anchor] = m_events.Count - 1;
|
||||
|
||||
// Count anchors
|
||||
m_telemetry.YamlAnchors++;
|
||||
}
|
||||
|
||||
// Count aliases
|
||||
if (m_current is AnchorAlias)
|
||||
{
|
||||
m_telemetry.YamlAliases++;
|
||||
}
|
||||
|
||||
// Validate node type
|
||||
if (m_current is not Scalar &&
|
||||
m_current is not MappingStart &&
|
||||
m_current is not MappingEnd &&
|
||||
m_current is not SequenceStart &&
|
||||
m_current is not SequenceEnd &&
|
||||
m_current is not DocumentStart &&
|
||||
m_current is not DocumentEnd &&
|
||||
m_current is not StreamStart &&
|
||||
m_current is not StreamEnd &&
|
||||
m_current is not AnchorAlias)
|
||||
{
|
||||
throw new InvalidOperationException($"Unexpected parsing event type: {m_current.GetType().Name}");
|
||||
}
|
||||
}
|
||||
|
||||
// Alias?
|
||||
if (m_current is AnchorAlias alias)
|
||||
{
|
||||
// Anchor index
|
||||
if (!m_anchors.TryGetValue(alias.Value, out var anchorIndex))
|
||||
{
|
||||
throw new InvalidOperationException($"Unknown anchor '{alias.Value}'");
|
||||
}
|
||||
|
||||
// Move to anchor
|
||||
m_current = m_events[anchorIndex];
|
||||
|
||||
// Push replay state
|
||||
m_replay.Push(new YamlReplayState { Index = anchorIndex, Depth = 0 });
|
||||
}
|
||||
|
||||
// Max nodes traversed?
|
||||
m_numNodes++;
|
||||
if (m_numNodes > c_maxYamlNodes)
|
||||
{
|
||||
throw new InvalidOperationException("Maximum YAML nodes exceeded");
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private BooleanToken ParseBoolean(Scalar scalar)
|
||||
{
|
||||
if (MatchBoolean(scalar, out var token))
|
||||
{
|
||||
return token;
|
||||
}
|
||||
|
||||
ThrowInvalidValue(scalar, c_booleanTag); // throws
|
||||
return default;
|
||||
}
|
||||
|
||||
private NumberToken ParseFloat(Scalar scalar)
|
||||
{
|
||||
if (MatchFloat(scalar, out var token))
|
||||
{
|
||||
return token;
|
||||
}
|
||||
|
||||
ThrowInvalidValue(scalar, c_floatTag); // throws
|
||||
return default;
|
||||
}
|
||||
|
||||
private NumberToken ParseInteger(Scalar scalar)
|
||||
{
|
||||
if (MatchInteger(scalar, out var token))
|
||||
{
|
||||
return token;
|
||||
}
|
||||
|
||||
ThrowInvalidValue(scalar, c_integerTag); // throws
|
||||
return default;
|
||||
}
|
||||
|
||||
private NullToken ParseNull(Scalar scalar)
|
||||
{
|
||||
if (MatchNull(scalar, out var token))
|
||||
{
|
||||
return token;
|
||||
}
|
||||
|
||||
ThrowInvalidValue(scalar, c_nullTag); // throws
|
||||
return default;
|
||||
}
|
||||
|
||||
private Boolean MatchBoolean(
|
||||
Scalar scalar,
|
||||
out BooleanToken value)
|
||||
{
|
||||
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
|
||||
switch (scalar.Value ?? String.Empty)
|
||||
{
|
||||
case "true":
|
||||
case "True":
|
||||
case "TRUE":
|
||||
value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, true);
|
||||
return true;
|
||||
case "false":
|
||||
case "False":
|
||||
case "FALSE":
|
||||
value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, false);
|
||||
return true;
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
private Boolean MatchFloat(
|
||||
Scalar scalar,
|
||||
out NumberToken value)
|
||||
{
|
||||
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
|
||||
var str = scalar.Value;
|
||||
if (!String.IsNullOrEmpty(str))
|
||||
{
|
||||
// Check for [-+]?(\.inf|\.Inf|\.INF)|\.nan|\.NaN|\.NAN
|
||||
switch (str)
|
||||
{
|
||||
case ".inf":
|
||||
case ".Inf":
|
||||
case ".INF":
|
||||
case "+.inf":
|
||||
case "+.Inf":
|
||||
case "+.INF":
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.PositiveInfinity);
|
||||
return true;
|
||||
case "-.inf":
|
||||
case "-.Inf":
|
||||
case "-.INF":
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NegativeInfinity);
|
||||
return true;
|
||||
case ".nan":
|
||||
case ".NaN":
|
||||
case ".NAN":
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NaN);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Otherwise check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?
|
||||
|
||||
// Skip leading sign
|
||||
var index = str[0] == '-' || str[0] == '+' ? 1 : 0;
|
||||
|
||||
// Check for integer portion
|
||||
var length = str.Length;
|
||||
var hasInteger = false;
|
||||
while (index < length && str[index] >= '0' && str[index] <= '9')
|
||||
{
|
||||
hasInteger = true;
|
||||
index++;
|
||||
}
|
||||
|
||||
// Check for decimal point
|
||||
var hasDot = false;
|
||||
if (index < length && str[index] == '.')
|
||||
{
|
||||
hasDot = true;
|
||||
index++;
|
||||
}
|
||||
|
||||
// Check for decimal portion
|
||||
var hasDecimal = false;
|
||||
while (index < length && str[index] >= '0' && str[index] <= '9')
|
||||
{
|
||||
hasDecimal = true;
|
||||
index++;
|
||||
}
|
||||
|
||||
// Check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)
|
||||
if ((hasDot && hasDecimal) || hasInteger)
|
||||
{
|
||||
// Check for end
|
||||
if (index == length)
|
||||
{
|
||||
// Try parse
|
||||
if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var doubleValue))
|
||||
{
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
|
||||
return true;
|
||||
}
|
||||
// Otherwise exceeds range
|
||||
else
|
||||
{
|
||||
ThrowInvalidValue(scalar, c_floatTag); // throws
|
||||
}
|
||||
}
|
||||
// Check [eE][-+]?[0-9]
|
||||
else if (index < length && (str[index] == 'e' || str[index] == 'E'))
|
||||
{
|
||||
index++;
|
||||
|
||||
// Skip sign
|
||||
if (index < length && (str[index] == '-' || str[index] == '+'))
|
||||
{
|
||||
index++;
|
||||
}
|
||||
|
||||
// Check for exponent
|
||||
var hasExponent = false;
|
||||
while (index < length && str[index] >= '0' && str[index] <= '9')
|
||||
{
|
||||
hasExponent = true;
|
||||
index++;
|
||||
}
|
||||
|
||||
// Check for end
|
||||
if (hasExponent && index == length)
|
||||
{
|
||||
// Try parse
|
||||
if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out var doubleValue))
|
||||
{
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
|
||||
return true;
|
||||
}
|
||||
// Otherwise exceeds range
|
||||
else
|
||||
{
|
||||
ThrowInvalidValue(scalar, c_floatTag); // throws
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
private Boolean MatchInteger(
|
||||
Scalar scalar,
|
||||
out NumberToken value)
|
||||
{
|
||||
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
|
||||
var str = scalar.Value;
|
||||
if (!String.IsNullOrEmpty(str))
|
||||
{
|
||||
// Check for [0-9]+
|
||||
var firstChar = str[0];
|
||||
if (firstChar >= '0' && firstChar <= '9' &&
|
||||
str.Skip(1).All(x => x >= '0' && x <= '9'))
|
||||
{
|
||||
// Try parse
|
||||
if (Double.TryParse(str, NumberStyles.None, CultureInfo.InvariantCulture, out var doubleValue))
|
||||
{
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Otherwise exceeds range
|
||||
ThrowInvalidValue(scalar, c_integerTag); // throws
|
||||
}
|
||||
// Check for (-|+)[0-9]+
|
||||
else if ((firstChar == '-' || firstChar == '+') &&
|
||||
str.Length > 1 &&
|
||||
str.Skip(1).All(x => x >= '0' && x <= '9'))
|
||||
{
|
||||
// Try parse
|
||||
if (Double.TryParse(str, NumberStyles.AllowLeadingSign, CultureInfo.InvariantCulture, out var doubleValue))
|
||||
{
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Otherwise exceeds range
|
||||
ThrowInvalidValue(scalar, c_integerTag); // throws
|
||||
}
|
||||
// Check for 0x[0-9a-fA-F]+
|
||||
else if (firstChar == '0' &&
|
||||
str.Length > 2 &&
|
||||
str[1] == 'x' &&
|
||||
str.Skip(2).All(x => (x >= '0' && x <= '9') || (x >= 'a' && x <= 'f') || (x >= 'A' && x <= 'F')))
|
||||
{
|
||||
// Try parse
|
||||
if (Int32.TryParse(str.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out var integerValue))
|
||||
{
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Otherwise exceeds range
|
||||
ThrowInvalidValue(scalar, c_integerTag); // throws
|
||||
}
|
||||
// Check for 0o[0-9]+
|
||||
else if (firstChar == '0' &&
|
||||
str.Length > 2 &&
|
||||
str[1] == 'o' &&
|
||||
str.Skip(2).All(x => x >= '0' && x <= '7'))
|
||||
{
|
||||
// Try parse
|
||||
var integerValue = default(Int32);
|
||||
try
|
||||
{
|
||||
integerValue = Convert.ToInt32(str.Substring(2), 8);
|
||||
}
|
||||
// Otherwise exceeds range
|
||||
catch (Exception)
|
||||
{
|
||||
ThrowInvalidValue(scalar, c_integerTag); // throws
|
||||
}
|
||||
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
private Boolean MatchNull(
|
||||
Scalar scalar,
|
||||
out NullToken value)
|
||||
{
|
||||
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
|
||||
switch (scalar.Value ?? String.Empty)
|
||||
{
|
||||
case "":
|
||||
case "null":
|
||||
case "Null":
|
||||
case "NULL":
|
||||
case "~":
|
||||
value = new NullToken(m_fileId, scalar.Start.Line, scalar.Start.Column);
|
||||
return true;
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
private void ThrowInvalidValue(
|
||||
Scalar scalar,
|
||||
String tag)
|
||||
{
|
||||
throw new NotSupportedException($"The value '{scalar.Value}' on line {scalar.Start.Line} and column {scalar.Start.Column} is invalid for the type '{tag}'");
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// The maximum number of YAML nodes allowed when parsing a file. A single YAML node may be
|
||||
/// encountered multiple times due to YAML anchors.
|
||||
///
|
||||
/// Note: depth and maximum accumulated bytes are tracked in an outer layer. The goal of this
|
||||
/// layer is to prevent YAML anchors from causing excessive node traversal.
|
||||
/// </summary>
|
||||
private const int c_maxYamlNodes = 50000;
|
||||
|
||||
/// <summary>
|
||||
/// Boolean YAML tag
|
||||
/// </summary>
|
||||
private const String c_booleanTag = "tag:yaml.org,2002:bool";
|
||||
|
||||
/// <summary>
|
||||
/// Float YAML tag
|
||||
/// </summary>
|
||||
private const String c_floatTag = "tag:yaml.org,2002:float";
|
||||
|
||||
/// <summary>
|
||||
/// Integer YAML tag
|
||||
/// </summary>
|
||||
private const String c_integerTag = "tag:yaml.org,2002:int";
|
||||
|
||||
/// <summary>
|
||||
/// Null YAML tag
|
||||
/// </summary>
|
||||
private const String c_nullTag = "tag:yaml.org,2002:null";
|
||||
|
||||
/// <summary>
|
||||
/// String YAML tag
|
||||
/// </summary>
|
||||
private const String c_stringTag = "tag:yaml.org,2002:str";
|
||||
|
||||
/// <summary>
|
||||
/// File ID
|
||||
/// </summary>
|
||||
private readonly Int32? m_fileId;
|
||||
|
||||
/// <summary>
|
||||
/// Parser instance
|
||||
/// </summary>
|
||||
private readonly Parser m_parser;
|
||||
|
||||
/// <summary>
|
||||
/// Current parsing event
|
||||
/// </summary>
|
||||
private ParsingEvent m_current;
|
||||
|
||||
/// <summary>
|
||||
/// Indicates whether YAML anchors are allowed
|
||||
/// </summary>
|
||||
private readonly Boolean m_allowAnchors;
|
||||
|
||||
/// <summary>
|
||||
/// Telemetry data
|
||||
/// </summary>
|
||||
private readonly Telemetry m_telemetry;
|
||||
|
||||
/// <summary>
|
||||
/// Number of YAML nodes traversed
|
||||
/// </summary>
|
||||
private Int32 m_numNodes;
|
||||
|
||||
/// <summary>
|
||||
/// All encountered parsing events
|
||||
/// </summary>
|
||||
private readonly List<ParsingEvent> m_events;
|
||||
|
||||
/// <summary>
|
||||
/// Anchor event index map
|
||||
/// </summary>
|
||||
private readonly Dictionary<String, Int32> m_anchors;
|
||||
|
||||
/// <summary>
|
||||
/// Stack of anchor replay states
|
||||
/// </summary>
|
||||
private readonly Stack<YamlReplayState> m_replay;
|
||||
}
|
||||
}
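A minimal sketch of driving the reader protocol above by hand (illustrative only; in the parser the reader is consumed via TemplateReader.Read, and the MappingToken.Add overload is assumed):

var reader = new YamlObjectReader(fileId: null, input: new StringReader("count: 3"));
reader.ValidateStart();                          // StreamStart + DocumentStart
if (reader.AllowMappingStart(out var mapping))
{
    while (!reader.AllowMappingEnd())
    {
        reader.AllowLiteral(out var key);        // StringToken "count"
        reader.AllowLiteral(out var value);      // NumberToken 3 (plain scalar, core schema)
        mapping.Add(key, value);
    }
}
reader.ValidateEnd();                            // DocumentEnd + StreamEnd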
|
||||
73
src/Sdk/WorkflowParser/Conversion/YamlObjectWriter.cs
Normal file
@@ -0,0 +1,73 @@
|
||||
using System;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using GitHub.Actions.WorkflowParser.ObjectTemplating;
|
||||
using YamlDotNet.Core.Events;
|
||||
|
||||
namespace GitHub.Actions.WorkflowParser.Conversion
|
||||
{
|
||||
/// <summary>
|
||||
/// Converts a TemplateToken into YAML
|
||||
/// </summary>
|
||||
internal sealed class YamlObjectWriter : IObjectWriter
|
||||
{
|
||||
internal YamlObjectWriter(StringWriter writer)
|
||||
{
|
||||
m_emitter = new YamlDotNet.Core.Emitter(writer);
|
||||
}
|
||||
|
||||
public void WriteString(String value)
|
||||
{
|
||||
m_emitter.Emit(new Scalar(value ?? String.Empty));
|
||||
}
|
||||
|
||||
public void WriteBoolean(Boolean value)
|
||||
{
|
||||
m_emitter.Emit(new Scalar(value ? "true" : "false"));
|
||||
}
|
||||
|
||||
public void WriteNumber(Double value)
|
||||
{
|
||||
m_emitter.Emit(new Scalar(value.ToString("G15", CultureInfo.InvariantCulture)));
|
||||
}
|
||||
|
||||
public void WriteNull()
|
||||
{
|
||||
m_emitter.Emit(new Scalar("null"));
|
||||
}
|
||||
|
||||
public void WriteSequenceStart()
|
||||
{
|
||||
m_emitter.Emit(new SequenceStart(null, null, true, SequenceStyle.Block));
|
||||
}
|
||||
|
||||
public void WriteSequenceEnd()
|
||||
{
|
||||
m_emitter.Emit(new SequenceEnd());
|
||||
}
|
||||
|
||||
public void WriteMappingStart()
|
||||
{
|
||||
m_emitter.Emit(new MappingStart());
|
||||
}
|
||||
|
||||
public void WriteMappingEnd()
|
||||
{
|
||||
m_emitter.Emit(new MappingEnd());
|
||||
}
|
||||
|
||||
public void WriteStart()
|
||||
{
|
||||
m_emitter.Emit(new StreamStart());
|
||||
m_emitter.Emit(new DocumentStart());
|
||||
}
|
||||
|
||||
public void WriteEnd()
|
||||
{
|
||||
m_emitter.Emit(new DocumentEnd(isImplicit: true));
|
||||
m_emitter.Emit(new StreamEnd());
|
||||
}
|
||||
|
||||
private readonly YamlDotNet.Core.IEmitter m_emitter;
|
||||
}
|
||||
}
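A minimal round-trip sketch for the writer above (illustrative only):

var buffer = new StringWriter();
var writer = new YamlObjectWriter(buffer);
writer.WriteStart();
writer.WriteMappingStart();
writer.WriteString("name");
writer.WriteString("build");
writer.WriteMappingEnd();
writer.WriteEnd();
// buffer.ToString() now holds the emitted YAML (roughly "name: build")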
|
||||
23
src/Sdk/WorkflowParser/Conversion/YamlReplayState.cs
Normal file
@@ -0,0 +1,23 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
|
||||
namespace GitHub.Actions.WorkflowParser.Conversion
|
||||
{
|
||||
/// <summary>
|
||||
/// Index and depth while replaying a YAML anchor
|
||||
/// </summary>
|
||||
internal sealed class YamlReplayState
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets or sets the current node event index that is being replayed.
|
||||
/// </summary>
|
||||
public Int32 Index { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the depth within the current anchor that is being replayed.
|
||||
/// When the depth reaches zero, the anchor replay is complete.
|
||||
/// </summary>
|
||||
public Int32 Depth { get; set; }
|
||||
}
|
||||
}
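A worked example of the replay bookkeeping above (illustrative). For a document such as:

//   defaults: &shared        # anchor 'shared' recorded at its event index
//     retries: 2
//   job: *shared             # alias replays the recorded MappingStart..MappingEnd events

When the reader meets the alias it pushes a YamlReplayState pointing at the anchor's event index; Depth is incremented on each MappingStart/SequenceStart, decremented on each MappingEnd/SequenceEnd, and the state is popped when Depth returns to zero (a scalar anchor is popped immediately).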
|
||||
124
src/Sdk/WorkflowParser/Conversion/YamlTemplateLoader.cs
Normal file
@@ -0,0 +1,124 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using GitHub.Actions.WorkflowParser.ObjectTemplating;
|
||||
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
|
||||
|
||||
namespace GitHub.Actions.WorkflowParser.Conversion
|
||||
{
|
||||
/// <summary>
|
||||
/// Loads a YAML file, and returns the parsed TemplateToken
|
||||
/// </summary>
|
||||
internal sealed class YamlTemplateLoader
|
||||
{
|
||||
public YamlTemplateLoader(
|
||||
ParseOptions parseOptions,
|
||||
IFileProvider fileProvider)
|
||||
{
|
||||
m_parseOptions = new ParseOptions(parseOptions);
|
||||
m_fileProvider = fileProvider ?? throw new ArgumentNullException(nameof(fileProvider));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parses a workflow template and returns the resulting <see cref="TemplateToken" />.
///
/// Check the errors collection on the supplied <see cref="TemplateContext" /> for parse errors.
|
||||
/// </summary>
|
||||
public TemplateToken ParseWorkflow(
|
||||
TemplateContext context,
|
||||
String path)
|
||||
{
|
||||
var result = default(TemplateToken);
|
||||
try
|
||||
{
|
||||
result = LoadFile(context, path, WorkflowTemplateConstants.WorkflowRoot);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
context.Errors.Add(ex);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private TemplateToken LoadFile(
|
||||
TemplateContext context,
|
||||
String path,
|
||||
String templateType)
|
||||
{
|
||||
if (context.Errors.Count > 0)
|
||||
{
|
||||
throw new InvalidOperationException("Expected error count to be 0 when attempting to load a new file");
|
||||
}
|
||||
|
||||
// Is entry file?
|
||||
var isEntryFile = m_referencedFiles.Count == 0;
|
||||
|
||||
// Root the path
|
||||
path = m_fileProvider.ResolvePath(null, path);
|
||||
|
||||
// Validate max files
|
||||
m_referencedFiles.Add(path);
|
||||
if (m_parseOptions.MaxFiles > 0 && m_referencedFiles.Count > m_parseOptions.MaxFiles)
|
||||
{
|
||||
throw new InvalidOperationException($"The maximum file count of {m_parseOptions.MaxFiles} has been exceeded");
|
||||
}
|
||||
|
||||
// Get the file ID
|
||||
var fileId = context.GetFileId(path);
|
||||
|
||||
// Check the cache
|
||||
if (!m_cache.TryGetValue(path, out String fileContent))
|
||||
{
|
||||
// Fetch the file
|
||||
context.CancellationToken.ThrowIfCancellationRequested();
|
||||
fileContent = m_fileProvider.GetFileContent(path);
|
||||
|
||||
// Validate max file size
|
||||
if (fileContent.Length > m_parseOptions.MaxFileSize)
|
||||
{
|
||||
throw new InvalidOperationException($"{path}: The maximum file size of {m_parseOptions.MaxFileSize} characters has been exceeded");
|
||||
}
|
||||
|
||||
// Cache
|
||||
m_cache[path] = fileContent;
|
||||
}
|
||||
|
||||
// Deserialize
|
||||
var token = default(TemplateToken);
|
||||
using (var stringReader = new StringReader(fileContent))
|
||||
{
|
||||
var yamlObjectReader = new YamlObjectReader(fileId, stringReader, m_parseOptions.AllowAnchors, context.Telemetry);
|
||||
token = TemplateReader.Read(context, templateType, yamlObjectReader, fileId, out _);
|
||||
}
|
||||
|
||||
// Trace
|
||||
if (!isEntryFile)
|
||||
{
|
||||
context.TraceWriter.Info(String.Empty);
|
||||
}
|
||||
context.TraceWriter.Info("# ");
|
||||
context.TraceWriter.Info("# {0}", path);
|
||||
context.TraceWriter.Info("# ");
|
||||
|
||||
return token;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Cache of file content
|
||||
/// </summary>
|
||||
private readonly Dictionary<String, String> m_cache = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
private readonly IFileProvider m_fileProvider;
|
||||
|
||||
private readonly ParseOptions m_parseOptions;
|
||||
|
||||
/// <summary>
|
||||
/// Tracks unique file references
|
||||
/// </summary>
|
||||
private readonly HashSet<String> m_referencedFiles = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
|
||||
}
|
||||
}
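A minimal sketch of an in-memory IFileProvider usable with the loader above (illustrative only; assumes IFileProvider exposes just the ResolvePath and GetFileContent members referenced in LoadFile):

internal sealed class InMemoryFileProvider : IFileProvider
{
    private readonly Dictionary<String, String> m_files;

    public InMemoryFileProvider(Dictionary<String, String> files)
    {
        m_files = new Dictionary<String, String>(files, StringComparer.OrdinalIgnoreCase);
    }

    // Treat every path as already rooted for the purposes of this sketch
    public String ResolvePath(String defaultRoot, String path) => path;

    public String GetFileContent(String path) => m_files[path];
}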
|
||||