diff --git a/go.mod b/go.mod index 97b3b197..0f124dbb 100644 --- a/go.mod +++ b/go.mod @@ -21,7 +21,6 @@ require ( github.com/opencontainers/image-spec v1.1.1 github.com/opencontainers/selinux v1.12.0 github.com/pkg/errors v0.9.1 - github.com/rhysd/actionlint v1.7.7 github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 github.com/sirupsen/logrus v1.9.3 github.com/spf13/cobra v1.10.1 @@ -49,7 +48,6 @@ require ( require ( github.com/Microsoft/go-winio v0.6.2 // indirect github.com/ProtonMail/go-crypto v1.1.6 // indirect - github.com/bmatcuk/doublestar/v4 v4.8.0 // indirect github.com/cloudflare/circl v1.6.1 // indirect github.com/containerd/errdefs/pkg v0.3.0 // indirect github.com/containerd/log v0.1.0 // indirect @@ -59,7 +57,6 @@ require ( github.com/docker/docker-credential-helpers v0.8.2 // indirect github.com/docker/go-units v0.5.0 // indirect github.com/emirpasic/gods v1.18.1 // indirect - github.com/fatih/color v1.18.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-logr/logr v1.4.2 // indirect @@ -73,8 +70,6 @@ require ( github.com/kevinburke/ssh_config v1.2.0 // indirect github.com/klauspost/compress v1.18.0 // indirect github.com/mattn/go-colorable v0.1.14 // indirect - github.com/mattn/go-runewidth v0.0.16 // indirect - github.com/mattn/go-shellwords v1.0.12 // indirect github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect github.com/moby/docker-image-spec v1.3.1 // indirect github.com/moby/sys/atomicwriter v0.1.0 // indirect @@ -86,8 +81,6 @@ require ( github.com/opencontainers/go-digest v1.0.0 // indirect github.com/pjbgf/sha1cd v0.3.2 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/rivo/uniseg v0.4.7 // indirect - github.com/robfig/cron/v3 v3.0.1 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect 
github.com/skeema/knownhosts v1.3.1 // indirect diff --git a/go.sum b/go.sum index 65385fc6..e9d0eee1 100644 --- a/go.sum +++ b/go.sum @@ -15,8 +15,6 @@ github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2 h1:+vx7roKuyA63n github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2/go.mod h1:HBCaDeC1lPdgDeDbhX8XFpy1jqjK0IBG8W5K+xYqA0w= github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNxpLfdw= github.com/ProtonMail/go-crypto v1.1.6/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= -github.com/actions-oss/act-cli-actionlint v0.0.0-20250517100532-8f847f29ba36 h1:QnIPcWM4eVfqRUB3B6sLOwEJrMrTa64qrVqzxF5A21U= -github.com/actions-oss/act-cli-actionlint v0.0.0-20250517100532-8f847f29ba36/go.mod h1:AE6I6vJEkNaIfWqC2GNE5spIJNhxf8NCtLEKU4NnUXg= github.com/adrg/xdg v0.5.3 h1:xRnxJXne7+oWDatRhR1JLnvuccuIeCoBu2rtuLqQB78= github.com/adrg/xdg v0.5.3/go.mod h1:nlTsY+NNiCBGCK2tpm09vRqfVzrc2fLmXGpBLF0zlTQ= github.com/andreaskoch/go-fswatch v1.0.0 h1:la8nP/HiaFCxP2IM6NZNUCoxgLWuyNFgH0RligBbnJU= @@ -27,8 +25,6 @@ github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPd github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/avast/retry-go v3.0.0+incompatible h1:4SOWQ7Qs+oroOTQOYnAHqelpCO0biHSxpiH9JdtuBj0= github.com/avast/retry-go v3.0.0+incompatible/go.mod h1:XtSnn+n/sHqQIpZ10K1qAevBhOOCWBLXXy3hyiqqBrY= -github.com/bmatcuk/doublestar/v4 v4.8.0 h1:DSXtrypQddoug1459viM9X9D3dp1Z7993fw36I2kNcQ= -github.com/bmatcuk/doublestar/v4 v4.8.0/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0= @@ -65,8 +61,6 @@ github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o 
github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= -github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= -github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c= @@ -127,10 +121,6 @@ github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stg github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= -github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= -github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk= -github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y= github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI= github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= @@ -166,11 +156,6 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 
h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= -github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= -github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= -github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= -github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= diff --git a/internal/eval/functions/format.go b/internal/eval/functions/format.go new file mode 100644 index 00000000..429c786c --- /dev/null +++ b/internal/eval/functions/format.go @@ -0,0 +1,122 @@ +package functions + +import ( + "fmt" + "strconv" + "strings" +) + +// Format evaluates a format string with the supplied arguments. +// It behaves like the C# implementation in the repository – +// it supports escaped braces and numeric argument indices. +// Format specifiers (e.g. :D) are recognised but currently ignored. 
func Format(formatStr string, args ...interface{}) (string, error) {
    var sb strings.Builder
    i := 0
    for i < len(formatStr) {
        // NOTE: lbrace and rbrace are offsets relative to formatStr[i:],
        // while l (below) and the helper results are absolute offsets into
        // formatStr. Keep that distinction in mind when reading the math.
        lbrace := strings.IndexByte(formatStr[i:], '{')
        rbrace := strings.IndexByte(formatStr[i:], '}')

        // left brace occurs before any right brace
        if lbrace >= 0 && (rbrace < 0 || rbrace > lbrace) {
            l := i + lbrace

            // Emit the literal text preceding the brace.
            sb.WriteString(formatStr[i:l])

            // escaped left brace: "{{" emits a single '{'
            if l+1 < len(formatStr) && formatStr[l+1] == '{' {
                sb.WriteString(formatStr[l : l+1])
                i = l + 2
                continue
            }

            // normal placeholder: needs at least one character between the
            // braces (an empty "{}" is rejected below)
            if rbrace > lbrace+1 {
                // read index (decimal argument index right after '{')
                idx, endIdx, ok := readArgIndex(formatStr, l+1)
                if !ok {
                    return "", fmt.Errorf("invalid format string: %s", formatStr)
                }
                // read optional format specifier (":spec" up to the closing '}')
                spec, r, ok := readFormatSpecifiers(formatStr, endIdx+1)
                if !ok {
                    return "", fmt.Errorf("invalid format string: %s", formatStr)
                }
                if idx >= len(args) {
                    return "", fmt.Errorf("argument index %d out of range", idx)
                }
                // append argument (format specifier is ignored here)
                arg := args[idx]
                sb.WriteString(fmt.Sprintf("%v", arg))
                if spec != "" {
                    // placeholder for future specifier handling
                    _ = spec
                }
                // r is the absolute index of the closing '}'.
                i = r + 1
                continue
            }
            return "", fmt.Errorf("invalid format string: %s", formatStr)
        }

        // right brace occurs first
        if rbrace >= 0 {
            // escaped right brace: "}}" emits a single '}'
            if i+rbrace+1 < len(formatStr) && formatStr[i+rbrace+1] == '}' {
                sb.WriteString(formatStr[i : i+rbrace+1])
                i += rbrace + 2
                continue
            }
            // a lone '}' is invalid
            return "", fmt.Errorf("invalid format string: %s", formatStr)
        }

        // No more braces: emit the rest of the string and stop.
        sb.WriteString(formatStr[i:])
        break
    }
    return sb.String(), nil
}

// readArgIndex parses a decimal number starting at pos.
// It returns the parsed value, the index of the last digit and true on success.
+func readArgIndex(s string, pos int) (int, int, bool) { + start := pos + for pos < len(s) && s[pos] >= '0' && s[pos] <= '9' { + pos++ + } + if start == pos { + return 0, 0, false + } + idx, err := strconv.Atoi(s[start:pos]) + if err != nil { + return 0, 0, false + } + return idx, pos - 1, true +} + +// readFormatSpecifiers reads an optional format specifier block. +// It returns the specifier string, the index of the closing '}' and true on success. +func readFormatSpecifiers(s string, pos int) (string, int, bool) { + if pos >= len(s) { + return "", 0, false + } + if s[pos] == '}' { + return "", pos, true + } + if s[pos] != ':' { + return "", 0, false + } + pos++ // skip ':' + start := pos + for pos < len(s) { + if s[pos] == '}' { + return s[start:pos], pos, true + } + if s[pos] == '}' && pos+1 < len(s) && s[pos+1] == '}' { + // escaped '}' + pos += 2 + continue + } + pos++ + } + return "", 0, false +} diff --git a/internal/eval/functions/format_test.go b/internal/eval/functions/format_test.go new file mode 100644 index 00000000..7db22478 --- /dev/null +++ b/internal/eval/functions/format_test.go @@ -0,0 +1,14 @@ +package functions + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestFormat(t *testing.T) { + s, err := Format("Hello {0}, you have {1} new messages", "Alice", 5) + assert.NoError(t, err) + fmt.Println(s) // Hello Alice, you have 5 new messages +} diff --git a/internal/eval/v2/evaluation_result.go b/internal/eval/v2/evaluation_result.go new file mode 100644 index 00000000..1af5bd07 --- /dev/null +++ b/internal/eval/v2/evaluation_result.go @@ -0,0 +1,464 @@ +package v2 + +import ( + "fmt" + "math" + "strconv" + "strings" +) + +// ValueKind represents the type of a value in the evaluation engine. +// The values mirror the C# ValueKind enum. +// +// Note: The names are kept identical to the C# implementation for easier mapping. 
+// +// The lexer is intentionally simple – it only tokenises the subset of +// expressions that are used in GitHub Actions workflow `if:` expressions. +// It does not evaluate the expression – that is left to the parser. + +type ValueKind int + +const ( + ValueKindNull ValueKind = iota + ValueKindBoolean + ValueKindNumber + ValueKindString + ValueKindObject + ValueKindArray +) + +type ReadOnlyArray[T any] interface { + GetAt(i int64) T + GetEnumerator() []T +} + +type ReadOnlyObject[T any] interface { + Get(key string) T + GetEnumerator() map[string]T +} + +type BasicArray[T any] []T + +func (a BasicArray[T]) GetAt(i int64) T { + if int(i) >= len(a) { + var zero T + return zero + } + return a[i] +} + +func (a BasicArray[T]) GetEnumerator() []T { + return a +} + +type CaseInsensitiveObject[T any] map[string]T + +func (o CaseInsensitiveObject[T]) Get(key string) T { + for k, v := range o { + if strings.EqualFold(k, key) { + return v + } + } + var zero T + return zero +} + +func (o CaseInsensitiveObject[T]) GetEnumerator() map[string]T { + return o +} + +type CaseSensitiveObject[T any] map[string]T + +func (o CaseSensitiveObject[T]) Get(key string) T { + return o[key] +} + +func (o CaseSensitiveObject[T]) GetEnumerator() map[string]T { + return o +} + +// EvaluationResult holds the result of evaluating an expression node. +// It mirrors the C# EvaluationResult class. + +type EvaluationResult struct { + context *EvaluationContext + level int + value interface{} + kind ValueKind + raw interface{} + omitTracing bool +} + +// NewEvaluationResult creates a new EvaluationResult. +func NewEvaluationResult(context *EvaluationContext, level int, val interface{}, kind ValueKind, raw interface{}, omitTracing bool) *EvaluationResult { + er := &EvaluationResult{context: context, level: level, value: val, kind: kind, raw: raw, omitTracing: omitTracing} + if !omitTracing { + er.traceValue() + } + return er +} + +// Kind returns the ValueKind of the result. 
+func (er *EvaluationResult) Kind() ValueKind { return er.kind } + +// Raw returns the raw value that was passed to the constructor. +func (er *EvaluationResult) Raw() interface{} { return er.raw } + +// Value returns the canonical value. +func (er *EvaluationResult) Value() interface{} { return er.value } + +// IsFalsy implements the logic from the C# class. +func (er *EvaluationResult) IsFalsy() bool { + switch er.kind { + case ValueKindNull: + return true + case ValueKindBoolean: + return !er.value.(bool) + case ValueKindNumber: + v := er.value.(float64) + return v == 0 || isNaN(v) + case ValueKindString: + return er.value.(string) == "" + default: + return false + } +} + +func isNaN(v float64) bool { return v != v } + +// IsPrimitive returns true if the kind is a primitive type. +func (er *EvaluationResult) IsPrimitive() bool { return er.kind <= ValueKindString } + +// IsTruthy is the negation of IsFalsy. +func (er *EvaluationResult) IsTruthy() bool { return !er.IsFalsy() } + +// AbstractEqual compares two EvaluationResults using the abstract equality algorithm. +func (er *EvaluationResult) AbstractEqual(other *EvaluationResult) bool { + return abstractEqual(er.value, other.value) +} + +// AbstractGreaterThan compares two EvaluationResults. 
+func (er *EvaluationResult) AbstractGreaterThan(other *EvaluationResult) bool { + return abstractGreaterThan(er.value, other.value) +} + +// AbstractGreaterThanOrEqual +func (er *EvaluationResult) AbstractGreaterThanOrEqual(other *EvaluationResult) bool { + return er.AbstractEqual(other) || er.AbstractGreaterThan(other) +} + +// AbstractLessThan +func (er *EvaluationResult) AbstractLessThan(other *EvaluationResult) bool { + return abstractLessThan(er.value, other.value) +} + +// AbstractLessThanOrEqual +func (er *EvaluationResult) AbstractLessThanOrEqual(other *EvaluationResult) bool { + return er.AbstractEqual(other) || er.AbstractLessThan(other) +} + +// AbstractNotEqual +func (er *EvaluationResult) AbstractNotEqual(other *EvaluationResult) bool { + return !er.AbstractEqual(other) +} + +// ConvertToNumber converts the value to a float64. +func (er *EvaluationResult) ConvertToNumber() float64 { return convertToNumber(er.value) } + +// ConvertToString converts the value to a string. +func (er *EvaluationResult) ConvertToString() string { + switch er.kind { + case ValueKindNull: + return "" + case ValueKindBoolean: + if er.value.(bool) { + return ExpressionConstants.True + } + return ExpressionConstants.False + case ValueKindNumber: + return fmt.Sprintf(ExpressionConstants.NumberFormat, er.value.(float64)) + case ValueKindString: + return er.value.(string) + default: + return fmt.Sprintf("%v", er.value) + } +} + +// TryGetCollectionInterface returns the underlying collection if the value is an array or object. +func (er *EvaluationResult) TryGetCollectionInterface() (interface{}, bool) { + switch v := er.value.(type) { + case ReadOnlyArray[any]: + return v, true + case ReadOnlyObject[any]: + return v, true + default: + return nil, false + } +} + +// CreateIntermediateResult creates an EvaluationResult from an arbitrary object. 
+func CreateIntermediateResult(context *EvaluationContext, obj interface{}) *EvaluationResult { + val, kind, raw := convertToCanonicalValue(obj) + return NewEvaluationResult(context, 0, val, kind, raw, true) +} + +// --- Helper functions and constants --------------------------------------- + +// ExpressionConstants holds string constants used in conversions. +var ExpressionConstants = struct { + True string + False string + NumberFormat string +}{ + True: "true", + False: "false", + NumberFormat: "%.15g", +} + +// convertToCanonicalValue converts an arbitrary Go value to a canonical form. +func convertToCanonicalValue(obj interface{}) (interface{}, ValueKind, interface{}) { + switch v := obj.(type) { + case nil: + return nil, ValueKindNull, nil + case bool: + return v, ValueKindBoolean, v + case int, int8, int16, int32, int64: + f := float64(toInt64(v)) + return f, ValueKindNumber, f + case uint, uint8, uint16, uint32, uint64: + f := float64(toUint64(v)) + return f, ValueKindNumber, f + case float32, float64: + f := toFloat64(v) + return f, ValueKindNumber, f + case string: + return v, ValueKindString, v + case []interface{}: + return BasicArray[any](v), ValueKindArray, v + case ReadOnlyArray[any]: + return v, ValueKindArray, v + case map[string]interface{}: + return CaseInsensitiveObject[any](v), ValueKindObject, v + case ReadOnlyObject[any]: + return v, ValueKindObject, v + default: + // Fallback: treat as object + return v, ValueKindObject, v + } +} + +func toInt64(v interface{}) int64 { + switch i := v.(type) { + case int: + return int64(i) + case int8: + return int64(i) + case int16: + return int64(i) + case int32: + return int64(i) + case int64: + return i + default: + return 0 + } +} + +func toUint64(v interface{}) uint64 { + switch i := v.(type) { + case uint: + return uint64(i) + case uint8: + return uint64(i) + case uint16: + return uint64(i) + case uint32: + return uint64(i) + case uint64: + return i + default: + return 0 + } +} + +func toFloat64(v 
interface{}) float64 { + switch f := v.(type) { + case float32: + return float64(f) + case float64: + return f + default: + return 0 + } +} + +// coerceTypes implements the C# CoerceTypes logic. +// It converts values to compatible types before comparison. +func coerceTypes(left, right interface{}) (interface{}, interface{}, ValueKind, ValueKind) { + leftKind := getKind(left) + rightKind := getKind(right) + + // same kind – nothing to do + if leftKind == rightKind { + return left, right, leftKind, rightKind + } + + // Number <-> String + if leftKind == ValueKindNumber && rightKind == ValueKindString { + right = convertToNumber(right) + rightKind = ValueKindNumber + return left, right, leftKind, rightKind + } + if leftKind == ValueKindString && rightKind == ValueKindNumber { + left = convertToNumber(left) + leftKind = ValueKindNumber + return left, right, leftKind, rightKind + } + + // Boolean or Null -> Number + if leftKind == ValueKindBoolean || leftKind == ValueKindNull { + left = convertToNumber(left) + return coerceTypes(left, right) + } + if rightKind == ValueKindBoolean || rightKind == ValueKindNull { + right = convertToNumber(right) + return coerceTypes(left, right) + } + + // otherwise keep as is + return left, right, leftKind, rightKind +} + +// abstractEqual uses coerceTypes before comparing. 
+func abstractEqual(left, right interface{}) bool { + left, right, leftKind, rightKind := coerceTypes(left, right) + if leftKind != rightKind { + return false + } + switch leftKind { + case ValueKindNull: + return true + case ValueKindNumber: + l := left.(float64) + r := right.(float64) + if isNaN(l) || isNaN(r) { + return false + } + return l == r + case ValueKindString: + return strings.EqualFold(left.(string), right.(string)) + case ValueKindBoolean: + return left.(bool) == right.(bool) + // Compare object equality fails via panic + // case ValueKindObject, ValueKindArray: + // return left == right + } + return false +} + +// abstractGreaterThan uses coerceTypes before comparing. +func abstractGreaterThan(left, right interface{}) bool { + left, right, leftKind, rightKind := coerceTypes(left, right) + if leftKind != rightKind { + return false + } + switch leftKind { + case ValueKindNumber: + l := left.(float64) + r := right.(float64) + if isNaN(l) || isNaN(r) { + return false + } + return l > r + case ValueKindString: + return strings.Compare(left.(string), right.(string)) > 0 + case ValueKindBoolean: + return left.(bool) && !right.(bool) + } + return false +} + +// abstractLessThan uses coerceTypes before comparing. +func abstractLessThan(left, right interface{}) bool { + left, right, leftKind, rightKind := coerceTypes(left, right) + if leftKind != rightKind { + return false + } + switch leftKind { + case ValueKindNumber: + l := left.(float64) + r := right.(float64) + if isNaN(l) || isNaN(r) { + return false + } + return l < r + case ValueKindString: + return strings.Compare(left.(string), right.(string)) < 0 + case ValueKindBoolean: + return !left.(bool) && right.(bool) + } + return false +} + +// convertToNumber converts a value to a float64 following JavaScript rules. 
+func convertToNumber(v interface{}) float64 { + switch val := v.(type) { + case nil: + return 0 + case bool: + if val { + return 1 + } + return 0 + case float64: + return val + case float32: + return float64(val) + case string: + // parsenumber + if val == "" { + return float64(0) + } + if len(val) > 2 { + switch val[:2] { + case "0x", "0o": + if i, err := strconv.ParseInt(val, 0, 32); err == nil { + return float64(i) + } + } + } + if f, err := strconv.ParseFloat(val, 64); err == nil { + return f + } + return math.NaN() + default: + return math.NaN() + } +} + +// getKind returns the ValueKind for a Go value. +func getKind(v interface{}) ValueKind { + switch v.(type) { + case nil: + return ValueKindNull + case bool: + return ValueKindBoolean + case float64, float32, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64: + return ValueKindNumber + case string: + return ValueKindString + case []interface{}: + return ValueKindArray + case map[string]interface{}: + return ValueKindObject + default: + return ValueKindObject + } +} + +// traceValue is a placeholder for tracing logic. +func (er *EvaluationResult) traceValue() { + // No-op in this simplified implementation. +} + +// --- End of file --------------------------------------- diff --git a/internal/eval/v2/evaluator.go b/internal/eval/v2/evaluator.go new file mode 100644 index 00000000..8caa03dc --- /dev/null +++ b/internal/eval/v2/evaluator.go @@ -0,0 +1,276 @@ +package v2 + +import ( + "errors" + "fmt" + + exprparser "github.com/actions-oss/act-cli/internal/expr" +) + +// EvaluationContext holds variables that can be referenced in expressions. 
+type EvaluationContext struct { + Variables ReadOnlyObject[any] + Functions ReadOnlyObject[Function] +} + +func NewEvaluationContext() *EvaluationContext { + return &EvaluationContext{} +} + +type Function interface { + Evaluate(eval *Evaluator, args []exprparser.Node) (*EvaluationResult, error) +} + +// Evaluator evaluates workflow expressions using the lexer and parser from workflow. +type Evaluator struct { + ctx *EvaluationContext +} + +// NewEvaluator creates an Evaluator with the supplied context. +func NewEvaluator(ctx *EvaluationContext) *Evaluator { + return &Evaluator{ctx: ctx} +} + +func (e *Evaluator) Context() *EvaluationContext { + return e.ctx +} + +func (e *Evaluator) Evaluate(root exprparser.Node) (*EvaluationResult, error) { + result, err := e.evalNode(root) + if err != nil { + return nil, err + } + return result, nil +} + +// EvaluateBoolean parses and evaluates the expression, returning a boolean result. +func (e *Evaluator) EvaluateBoolean(expr string) (bool, error) { + root, err := exprparser.Parse(expr) + if err != nil { + return false, fmt.Errorf("parse error: %w", err) + } + result, err := e.evalNode(root) + if err != nil { + return false, err + } + return result.IsTruthy(), nil +} + +func (e *Evaluator) ToRaw(result *EvaluationResult) (interface{}, error) { + if col, ok := result.TryGetCollectionInterface(); ok { + switch node := col.(type) { + case ReadOnlyObject[any]: + rawMap := map[string]interface{}{} + for k, v := range node.GetEnumerator() { + rawRes, err := e.ToRaw(CreateIntermediateResult(e.Context(), v)) + if err != nil { + return nil, err + } + rawMap[k] = rawRes + } + return rawMap, nil + case ReadOnlyArray[any]: + rawArray := []interface{}{} + for _, v := range node.GetEnumerator() { + rawRes, err := e.ToRaw(CreateIntermediateResult(e.Context(), v)) + if err != nil { + return nil, err + } + rawArray = append(rawArray, rawRes) + } + return rawArray, nil + } + } + return result.Value(), nil +} + +// Evaluate parses and 
evaluates the expression, returning a boolean result. +func (e *Evaluator) EvaluateRaw(expr string) (interface{}, error) { + root, err := exprparser.Parse(expr) + if err != nil { + return false, fmt.Errorf("parse error: %w", err) + } + result, err := e.evalNode(root) + if err != nil { + return false, err + } + return e.ToRaw(result) +} + +type FilteredArray []interface{} + +func (a FilteredArray) GetAt(i int64) interface{} { + if int(i) > len(a) { + return nil + } + return a[i] +} + +func (a FilteredArray) GetEnumerator() []interface{} { + return a +} + +// evalNode recursively evaluates a parser node and returns an EvaluationResult. +func (e *Evaluator) evalNode(n exprparser.Node) (*EvaluationResult, error) { + switch node := n.(type) { + case *exprparser.ValueNode: + return e.evalValueNode(node) + case *exprparser.FunctionNode: + return e.evalFunctionNode(node) + case *exprparser.BinaryNode: + return e.evalBinaryNode(node) + case *exprparser.UnaryNode: + return e.evalUnaryNode(node) + } + return nil, errors.New("unknown node type") +} + +func (e *Evaluator) evalValueNode(node *exprparser.ValueNode) (*EvaluationResult, error) { + if node.Kind == exprparser.TokenKindNamedValue { + if e.ctx != nil { + val := e.ctx.Variables.Get(node.Value.(string)) + if val == nil { + return nil, fmt.Errorf("undefined variable %s", node.Value) + } + return CreateIntermediateResult(e.Context(), val), nil + } + return nil, errors.New("no evaluation context") + } + return CreateIntermediateResult(e.Context(), node.Value), nil +} + +func (e *Evaluator) evalFunctionNode(node *exprparser.FunctionNode) (*EvaluationResult, error) { + fn := e.ctx.Functions.Get(node.Name) + if fn == nil { + return nil, fmt.Errorf("unknown function %v", node.Name) + } + return fn.Evaluate(e, node.Args) +} + +func (e *Evaluator) evalBinaryNode(node *exprparser.BinaryNode) (*EvaluationResult, error) { + left, err := e.evalNode(node.Left) + if err != nil { + return nil, err + } + if res, err := 
e.evalBinaryNodeLeft(node, left); res != nil || err != nil { + return res, err + } + right, err := e.evalNode(node.Right) + if err != nil { + return nil, err + } + return e.evalBinaryNodeRight(node, left, right) +} + +func (e *Evaluator) evalBinaryNodeLeft(node *exprparser.BinaryNode, left *EvaluationResult) (*EvaluationResult, error) { + switch node.Op { + case "&&": + if left.IsFalsy() { + return left, nil + } + case "||": + if left.IsTruthy() { + return left, nil + } + case ".": + if v, ok := node.Right.(*exprparser.ValueNode); ok && v.Kind == exprparser.TokenKindWildcard { + var ret FilteredArray + if col, ok := left.TryGetCollectionInterface(); ok { + if farray, ok := col.(FilteredArray); ok { + for _, subcol := range farray.GetEnumerator() { + ret = processStar(CreateIntermediateResult(e.Context(), subcol).Value(), ret) + } + } else { + ret = processStar(col, ret) + } + } + return CreateIntermediateResult(e.Context(), ret), nil + } + } + return nil, nil +} + +func (e *Evaluator) evalBinaryNodeRight(node *exprparser.BinaryNode, left *EvaluationResult, right *EvaluationResult) (*EvaluationResult, error) { + switch node.Op { + case "&&": + return right, nil + case "||": + return right, nil + case "==": + // Use abstract equality per spec + return CreateIntermediateResult(e.Context(), left.AbstractEqual(right)), nil + case "!=": + return CreateIntermediateResult(e.Context(), left.AbstractNotEqual(right)), nil + case ">": + return CreateIntermediateResult(e.Context(), left.AbstractGreaterThan(right)), nil + case "<": + return CreateIntermediateResult(e.Context(), left.AbstractLessThan(right)), nil + case ">=": + return CreateIntermediateResult(e.Context(), left.AbstractGreaterThanOrEqual(right)), nil + case "<=": + return CreateIntermediateResult(e.Context(), left.AbstractLessThanOrEqual(right)), nil + case ".", "[": + if farray, ok := left.Value().(FilteredArray); ok { + var ret FilteredArray + for _, subcol := range farray.GetEnumerator() { + res := 
processIndex(CreateIntermediateResult(e.Context(), subcol).Value(), right) + if res != nil { + ret = append(ret, res) + } + } + if ret == nil { + return CreateIntermediateResult(e.Context(), nil), nil + } + return CreateIntermediateResult(e.Context(), ret), nil + } + col, _ := left.TryGetCollectionInterface() + result := processIndex(col, right) + return CreateIntermediateResult(e.Context(), result), nil + default: + return nil, fmt.Errorf("unsupported operator %s", node.Op) + } +} + +func (e *Evaluator) evalUnaryNode(node *exprparser.UnaryNode) (*EvaluationResult, error) { + operand, err := e.evalNode(node.Operand) + if err != nil { + return nil, err + } + switch node.Op { + case "!": + return CreateIntermediateResult(e.Context(), !operand.IsTruthy()), nil + default: + return nil, fmt.Errorf("unsupported unary operator %s", node.Op) + } +} + +func processIndex(col interface{}, right *EvaluationResult) interface{} { + if mapVal, ok := col.(ReadOnlyObject[any]); ok { + key, ok := right.Value().(string) + if !ok { + return nil + } + val := mapVal.Get(key) + return val + } + if arrayVal, ok := col.(ReadOnlyArray[any]); ok { + key, ok := right.Value().(float64) + if !ok || key < 0 { + return nil + } + val := arrayVal.GetAt(int64(key)) + return val + } + return nil +} + +func processStar(subcol interface{}, ret FilteredArray) FilteredArray { + if array, ok := subcol.(ReadOnlyArray[any]); ok { + ret = append(ret, array.GetEnumerator()...) + } else if obj, ok := subcol.(ReadOnlyObject[any]); ok { + for _, v := range obj.GetEnumerator() { + ret = append(ret, v) + } + } + return ret +} diff --git a/internal/eval/v2/evaluator_test.go b/internal/eval/v2/evaluator_test.go new file mode 100644 index 00000000..bcebc291 --- /dev/null +++ b/internal/eval/v2/evaluator_test.go @@ -0,0 +1,111 @@ +package v2 + +import ( + "testing" +) + +// Test boolean and comparison operations using the evaluator. 
+func TestEvaluator_BooleanOps(t *testing.T) { + ctx := &EvaluationContext{Variables: CaseInsensitiveObject[any](map[string]interface{}{"a": 5, "b": 3})} + eval := NewEvaluator(ctx) + + tests := []struct { + expr string + want bool + }{ + {"1 == 1", true}, + {"1 != 2", true}, + {"5 > 3", true}, + {"2 < 4", true}, + {"5 >= 5", true}, + {"3 <= 4", true}, + {"true && false", false}, + {"!false", true}, + {"a > b", true}, + } + + for _, tt := range tests { + got, err := eval.EvaluateBoolean(tt.expr) + if err != nil { + t.Fatalf("evaluate %s error: %v", tt.expr, err) + } + if got != tt.want { + t.Fatalf("evaluate %s expected %v got %v", tt.expr, tt.want, got) + } + } +} + +func TestEvaluator_Raw(t *testing.T) { + ctx := &EvaluationContext{ + Variables: CaseInsensitiveObject[any](map[string]any{"a": 5, "b": 3}), + Functions: GetFunctions(), + } + eval := NewEvaluator(ctx) + + tests := []struct { + expr string + want interface{} + }{ + {"a.b['x']", nil}, + {"(a.b).c['x']", nil}, + {"(a.b).*['x']", nil}, + {"(a['x'])", nil}, + {"true || false", true}, + {"false || false", false}, + {"false || true", true}, + {"false || true || false", true}, + {"contains('', '') || contains('', '') || contains('', '')", true}, + {"1 == 1", true}, + {"1 != 2", true}, + {"5 > 3", true}, + {"2 < 4", true}, + {"5 >= 5", true}, + {"3 <= 4", true}, + {"true && false", false}, + {"!false", true}, + {"a > b", true}, + {"!(a > b)", false}, + {"!(a > b) || !0", true}, + {"!(a > b) || !(1)", false}, + {"'Hello World'", "Hello World"}, + {"23.5", 23.5}, + {"fromjson('{\"twst\":\"x\"}')['twst']", "x"}, + {"fromjson('{\"Twst\":\"x\"}')['twst']", "x"}, + {"fromjson('{\"TwsT\":\"x\"}')['twst']", "x"}, + {"fromjson('{\"TwsT\":\"x\"}')['tWst']", "x"}, + {"fromjson('{\"TwsT\":{\"a\":\"y\"}}').TwsT.a", "y"}, + {"fromjson('{\"TwsT\":{\"a\":\"y\"}}')['TwsT'].a", "y"}, + {"fromjson('{\"TwsT\":{\"a\":\"y\"}}')['TwsT']['a']", "y"}, + {"fromjson('{\"TwsT\":{\"a\":\"y\"}}').TwsT['a']", "y"}, + // 
{"fromjson('{\"TwsT\":\"x\"}').*[0]", "x"}, + {"fromjson('{\"TwsT\":[\"x\"]}')['TwsT'][0]", "x"}, + {"fromjson('[]')['tWst']", nil}, + {"fromjson('[]').tWst", nil}, + {"contains('a', 'a')", true}, + {"contains('bab', 'a')", true}, + {"contains('bab', 'ac')", false}, + {"contains(fromjson('[\"ac\"]'), 'ac')", true}, + {"contains(fromjson('[\"ac\"]'), 'a')", false}, + // {"fromjson('{\"TwsT\":{\"a\":\"y\"}}').*['a']", "y"}, + {"fromjson(tojson(fromjson('{\"TwsT\":{\"a\":\"y\"}}').*.a))[0]", "y"}, + {"fromjson(tojson(fromjson('{\"TwsT\":{\"a\":\"y\"}}').*['a']))[0]", "y"}, + {"fromjson('{}').x", nil}, + {"format('{0}', fromjson('{}').x)", ""}, + {"format('{0}', fromjson('{}')[0])", ""}, + {"fromjson(tojson(fromjson('[[3,5],[5,6]]').*[1]))[1]", float64(6)}, + {"contains(fromjson('[[3,5],[5,6]]').*[1], 5)", true}, + {"contains(fromjson('[[3,5],[5,6]]').*[1], 6)", true}, + {"contains(fromjson('[[3,5],[5,6]]').*[1], 3)", false}, + {"contains(fromjson('[[3,5],[5,6]]').*[1], '6')", true}, + } + + for _, tt := range tests { + got, err := eval.EvaluateRaw(tt.expr) + if err != nil { + t.Fatalf("evaluate %s error: %v", tt.expr, err) + } + if got != tt.want { + t.Fatalf("evaluate %s expected %v got %v", tt.expr, tt.want, got) + } + } +} diff --git a/internal/eval/v2/functions.go b/internal/eval/v2/functions.go new file mode 100644 index 00000000..5599491e --- /dev/null +++ b/internal/eval/v2/functions.go @@ -0,0 +1,176 @@ +package v2 + +import ( + "encoding/json" + "strings" + + "github.com/actions-oss/act-cli/internal/eval/functions" + exprparser "github.com/actions-oss/act-cli/internal/expr" +) + +type FromJSON struct { +} + +func (FromJSON) Evaluate(eval *Evaluator, args []exprparser.Node) (*EvaluationResult, error) { + r, err := eval.Evaluate(args[0]) + if err != nil { + return nil, err + } + var res any + if err := json.Unmarshal([]byte(r.ConvertToString()), &res); err != nil { + return nil, err + } + + return CreateIntermediateResult(eval.Context(), res), nil +} + +type 
ToJSON struct { +} + +func (ToJSON) Evaluate(eval *Evaluator, args []exprparser.Node) (*EvaluationResult, error) { + r, err := eval.Evaluate(args[0]) + if err != nil { + return nil, err + } + raw, err := eval.ToRaw(r) + if err != nil { + return nil, err + } + data, err := json.MarshalIndent(raw, "", " ") + if err != nil { + return nil, err + } + return CreateIntermediateResult(eval.Context(), string(data)), nil +} + +type Contains struct { +} + +func (Contains) Evaluate(eval *Evaluator, args []exprparser.Node) (*EvaluationResult, error) { + collection, err := eval.Evaluate(args[0]) + if err != nil { + return nil, err + } + el, err := eval.Evaluate(args[1]) + if err != nil { + return nil, err + } + // Array + if col, ok := collection.TryGetCollectionInterface(); ok { + if node, ok := col.(ReadOnlyArray[any]); ok { + for _, v := range node.GetEnumerator() { + canon := CreateIntermediateResult(eval.Context(), v) + if canon.AbstractEqual(el) { + return CreateIntermediateResult(eval.Context(), true), nil + } + } + } + return CreateIntermediateResult(eval.Context(), false), nil + } + // String + return CreateIntermediateResult(eval.Context(), strings.Contains(strings.ToLower(collection.ConvertToString()), strings.ToLower(el.ConvertToString()))), nil +} + +type StartsWith struct { +} + +func (StartsWith) Evaluate(eval *Evaluator, args []exprparser.Node) (*EvaluationResult, error) { + collection, err := eval.Evaluate(args[0]) + if err != nil { + return nil, err + } + el, err := eval.Evaluate(args[1]) + if err != nil { + return nil, err + } + // String + return CreateIntermediateResult(eval.Context(), strings.HasPrefix(strings.ToLower(collection.ConvertToString()), strings.ToLower(el.ConvertToString()))), nil +} + +type EndsWith struct { +} + +func (EndsWith) Evaluate(eval *Evaluator, args []exprparser.Node) (*EvaluationResult, error) { + collection, err := eval.Evaluate(args[0]) + if err != nil { + return nil, err + } + el, err := eval.Evaluate(args[1]) + if err != nil { + 
return nil, err + } + // String + return CreateIntermediateResult(eval.Context(), strings.HasSuffix(strings.ToLower(collection.ConvertToString()), strings.ToLower(el.ConvertToString()))), nil +} + +type Format struct { +} + +func (Format) Evaluate(eval *Evaluator, args []exprparser.Node) (*EvaluationResult, error) { + collection, err := eval.Evaluate(args[0]) + if err != nil { + return nil, err + } + + sargs := []interface{}{} + for _, arg := range args[1:] { + el, err := eval.Evaluate(arg) + if err != nil { + return nil, err + } + sargs = append(sargs, el.ConvertToString()) + } + + ret, err := functions.Format(collection.ConvertToString(), sargs...) + return CreateIntermediateResult(eval.Context(), ret), err +} + +type Join struct { +} + +func (Join) Evaluate(eval *Evaluator, args []exprparser.Node) (*EvaluationResult, error) { + collection, err := eval.Evaluate(args[0]) + if err != nil { + return nil, err + } + var el *EvaluationResult + + if len(args) > 1 { + if el, err = eval.Evaluate(args[1]); err != nil { + return nil, err + } + } + // Array + if col, ok := collection.TryGetCollectionInterface(); ok { + var elements []string + if node, ok := col.(ReadOnlyArray[any]); ok { + for _, v := range node.GetEnumerator() { + elements = append(elements, CreateIntermediateResult(eval.Context(), v).ConvertToString()) + } + } + var sep string + if el != nil { + sep = el.ConvertToString() + } else { + sep = "," + } + return CreateIntermediateResult(eval.Context(), strings.Join(elements, sep)), nil + } + // Primitive + if collection.IsPrimitive() { + return CreateIntermediateResult(eval.Context(), collection.ConvertToString()), nil + } + return CreateIntermediateResult(eval.Context(), ""), nil +} + +func GetFunctions() CaseInsensitiveObject[Function] { + return CaseInsensitiveObject[Function](map[string]Function{ + "fromjson": &FromJSON{}, + "tojson": &ToJSON{}, + "contains": &Contains{}, + "startswith": &StartsWith{}, + "endswith": &EndsWith{}, + "format": &Format{}, + 
"join": &Join{}, + }) +} diff --git a/internal/expr/expression_parse_test.go b/internal/expr/expression_parse_test.go new file mode 100644 index 00000000..32fb3132 --- /dev/null +++ b/internal/expr/expression_parse_test.go @@ -0,0 +1,27 @@ +package workflow + +import "testing" + +func TestExpressionParser(t *testing.T) { + node, err := Parse("github.event_name") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + t.Logf("Parsed expression: %+v", node) +} + +func TestExpressionParserWildcard(t *testing.T) { + node, err := Parse("github.commits.*.message") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + t.Logf("Parsed expression: %+v", node) +} + +func TestExpressionParserDot(t *testing.T) { + node, err := Parse("github.head_commit.message") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + t.Logf("Parsed expression: %+v", node) +} diff --git a/internal/expr/expression_parser.go b/internal/expr/expression_parser.go new file mode 100644 index 00000000..516edb25 --- /dev/null +++ b/internal/expr/expression_parser.go @@ -0,0 +1,306 @@ +package workflow + +import ( + "errors" + "fmt" + "strings" +) + +// Node represents a node in the expression tree. +// It is intentionally minimal – only the fields needed for the parser. +// Users can extend it with more information if required. + +type Node interface { + String() string +} + +// ValueNode represents a literal value (number, string, boolean, null) or a named value. +// The Kind field indicates the type. +// For named values the Value is nil. + +type ValueNode struct { + Kind TokenKind + Value interface{} +} + +// FunctionNode represents a function call with arguments. + +type FunctionNode struct { + Name string + Args []Node +} + +// BinaryNode represents a binary operator. + +type BinaryNode struct { + Op string + Left Node + Right Node +} + +// UnaryNode represents a unary operator. 
+ +type UnaryNode struct { + Op string + Operand Node +} + +// Parser holds the lexer and the stacks used by the shunting‑yard algorithm. + +type Parser struct { + lexer *Lexer + tokens []Token + pos int + ops []OpToken + vals []Node +} + +type OpToken struct { + Token + StartPos int +} + +func precedence(tkn Token) int { + switch tkn.Kind { + case TokenKindStartGroup: + return 20 + case TokenKindStartIndex, TokenKindStartParameters, TokenKindDereference: + return 19 + case TokenKindLogicalOperator: + switch tkn.Raw { + case "!": + return 16 + case ">", ">=", "<", "<=": + return 11 + case "==", "!=": + return 10 + case "&&": + return 6 + case "||": + return 5 + } + case TokenKindEndGroup, TokenKindEndIndex, TokenKindEndParameters, TokenKindSeparator: + return 1 + } + return 0 +} + +// Parse parses the expression and returns the root node. +func Parse(expression string) (Node, error) { + lexer := NewLexer(expression, 0) + p := &Parser{} + // Tokenise all tokens + if err := p.initWithLexer(lexer); err != nil { + return nil, err + } + return p.parse() +} + +func (p *Parser) parse() (Node, error) { + // Shunting‑yard algorithm + for p.pos < len(p.tokens) { + tok := p.tokens[p.pos] + p.pos++ + switch tok.Kind { + case TokenKindNumber, TokenKindString, TokenKindBoolean, TokenKindNull: + p.pushValue(&ValueNode{Kind: tok.Kind, Value: tok.Value}) + case TokenKindNamedValue, TokenKindPropertyName, TokenKindWildcard: + p.pushValue(&ValueNode{Kind: tok.Kind, Value: tok.Raw}) + case TokenKindFunction: + p.pushFunc(tok, len(p.vals)) + case TokenKindStartParameters, TokenKindStartGroup, TokenKindStartIndex, TokenKindLogicalOperator, TokenKindDereference: + if err := p.pushOp(tok); err != nil { + return nil, err + } + case TokenKindSeparator: + if err := p.popGroup(TokenKindStartParameters); err != nil { + return nil, err + } + case TokenKindEndParameters: + if err := p.pushFuncValue(); err != nil { + return nil, err + } + case TokenKindEndGroup: + if err := 
p.popGroup(TokenKindStartGroup); err != nil { + return nil, err + } + + p.ops = p.ops[:len(p.ops)-1] + case TokenKindEndIndex: + if err := p.popGroup(TokenKindStartIndex); err != nil { + return nil, err + } + + // pop the start parameters + p.ops = p.ops[:len(p.ops)-1] + right := p.vals[len(p.vals)-1] + p.vals = p.vals[:len(p.vals)-1] + left := p.vals[len(p.vals)-1] + p.vals = p.vals[:len(p.vals)-1] + p.vals = append(p.vals, &BinaryNode{Op: "[", Left: left, Right: right}) + } + } + for len(p.ops) > 0 { + if err := p.popOp(); err != nil { + return nil, err + } + } + if len(p.vals) != 1 { + return nil, errors.New("invalid expression") + } + return p.vals[0], nil +} + +func (p *Parser) pushFuncValue() error { + if err := p.popGroup(TokenKindStartParameters); err != nil { + return err + } + + // pop the start parameters + p.ops = p.ops[:len(p.ops)-1] + // create function node + fnTok := p.ops[len(p.ops)-1] + if fnTok.Kind != TokenKindFunction { + return errors.New("expected function token") + } + p.ops = p.ops[:len(p.ops)-1] + // collect arguments + args := []Node{} + for len(p.vals) > fnTok.StartPos { + args = append([]Node{p.vals[len(p.vals)-1]}, args...) 
+ p.vals = p.vals[:len(p.vals)-1] + } + p.pushValue(&FunctionNode{Name: fnTok.Raw, Args: args}) + return nil +} + +func (p *Parser) initWithLexer(lexer *Lexer) error { + p.lexer = lexer + for { + tok := lexer.Next() + if tok == nil { + break + } + if tok.Kind == TokenKindUnexpected { + return fmt.Errorf("unexpected token %s at position %d", tok.Raw, tok.Index) + } + p.tokens = append(p.tokens, *tok) + } + return nil +} + +func (p *Parser) popGroup(kind TokenKind) error { + for len(p.ops) > 0 && p.ops[len(p.ops)-1].Kind != kind { + if err := p.popOp(); err != nil { + return err + } + } + if len(p.ops) == 0 { + return errors.New("mismatched parentheses") + } + return nil +} + +func (p *Parser) pushValue(v Node) { + p.vals = append(p.vals, v) +} + +func (p *Parser) pushOp(t Token) error { + for len(p.ops) > 0 { + top := p.ops[len(p.ops)-1] + if precedence(top.Token) >= precedence(t) && + top.Kind != TokenKindStartGroup && + top.Kind != TokenKindStartIndex && + top.Kind != TokenKindStartParameters && + top.Kind != TokenKindSeparator { + if err := p.popOp(); err != nil { + return err + } + } else { + break + } + } + p.ops = append(p.ops, OpToken{Token: t}) + return nil +} + +func (p *Parser) pushFunc(t Token, start int) { + p.ops = append(p.ops, OpToken{Token: t, StartPos: start}) +} + +func (p *Parser) popOp() error { + if len(p.ops) == 0 { + return nil + } + op := p.ops[len(p.ops)-1] + p.ops = p.ops[:len(p.ops)-1] + switch op.Kind { + case TokenKindLogicalOperator: + if op.Raw == "!" 
{ + if len(p.vals) < 1 { + return errors.New("insufficient operands") + } + right := p.vals[len(p.vals)-1] + p.vals = p.vals[:len(p.vals)-1] + p.vals = append(p.vals, &UnaryNode{Op: op.Raw, Operand: right}) + } else { + if len(p.vals) < 2 { + return errors.New("insufficient operands") + } + right := p.vals[len(p.vals)-1] + left := p.vals[len(p.vals)-2] + p.vals = p.vals[:len(p.vals)-2] + p.vals = append(p.vals, &BinaryNode{Op: op.Raw, Left: left, Right: right}) + } + case TokenKindStartParameters: + // unary operator '!' handled elsewhere + case TokenKindDereference: + if len(p.vals) < 2 { + return errors.New("insufficient operands") + } + right := p.vals[len(p.vals)-1] + left := p.vals[len(p.vals)-2] + p.vals = p.vals[:len(p.vals)-2] + p.vals = append(p.vals, &BinaryNode{Op: ".", Left: left, Right: right}) + } + return nil +} + +// String returns a string representation of the node. +func (n *ValueNode) String() string { return fmt.Sprintf("%v", n.Value) } + +// String returns a string representation of the node. +func (n *FunctionNode) String() string { + return fmt.Sprintf("%s(%s)", n.Name, strings.Join(funcArgs(n.Args), ", ")) +} + +func funcArgs(args []Node) []string { + res := []string{} + for _, a := range args { + res = append(res, a.String()) + } + return res +} + +// String returns a string representation of the node. +func (n *BinaryNode) String() string { + return fmt.Sprintf("(%s %s %s)", n.Left.String(), n.Op, n.Right.String()) +} + +// String returns a string representation of the node. 
+func (n *UnaryNode) String() string { return fmt.Sprintf("(%s%s)", n.Op, n.Operand.String()) } + +func VisitNode(exprNode Node, callback func(node Node)) { + callback(exprNode) + switch node := exprNode.(type) { + case *FunctionNode: + for _, arg := range node.Args { + VisitNode(arg, callback) + } + case *UnaryNode: + VisitNode(node.Operand, callback) + case *BinaryNode: + VisitNode(node.Left, callback) + VisitNode(node.Right, callback) + } +} diff --git a/internal/expr/lexer.go b/internal/expr/lexer.go new file mode 100644 index 00000000..b6f1b764 --- /dev/null +++ b/internal/expr/lexer.go @@ -0,0 +1,361 @@ +package workflow + +import ( + "math" + "slices" + "strconv" + "strings" + "unicode" +) + +// TokenKind represents the type of token returned by the lexer. +// The values mirror the C# TokenKind enum. +// +// Note: The names are kept identical to the C# implementation for +// easier mapping when porting the parser. +// +// The lexer is intentionally simple – it only tokenises the subset of +// expressions that are used in GitHub Actions workflow `if:` expressions. +// It does not evaluate the expression – that is left to the parser. + +type TokenKind int + +const ( + TokenKindStartGroup TokenKind = iota + TokenKindStartIndex + TokenKindEndGroup + TokenKindEndIndex + TokenKindSeparator + TokenKindDereference + TokenKindWildcard + TokenKindLogicalOperator + TokenKindNumber + TokenKindString + TokenKindBoolean + TokenKindNull + TokenKindPropertyName + TokenKindFunction + TokenKindNamedValue + TokenKindStartParameters + TokenKindEndParameters + TokenKindUnexpected +) + +// Token represents a single lexical token. +// Raw holds the original text, Value holds the parsed value when applicable. +// Index is the start position in the source string. +// +// The struct is intentionally minimal – it only contains what the parser +// needs. If you need more information (e.g. token length) you can add it. 
+ +type Token struct { + Kind TokenKind + Raw string + Value interface{} + Index int +} + +// Lexer holds the state while tokenising an expression. +// It is a direct port of the C# LexicalAnalyzer. +// +// Flags can be used to enable/disable features – for now we only support +// a single flag that mirrors ExpressionFlags.DTExpressionsV1. +// +// The lexer is not thread‑safe – reuse a single instance per expression. + +type Lexer struct { + expr string + flags int + index int + last *Token + stack []TokenKind // unclosed start tokens +} + +// NewLexer creates a new lexer for the given expression. +func NewLexer(expr string, flags int) *Lexer { + return &Lexer{expr: expr, flags: flags} +} + +func testTokenBoundary(c rune) bool { + switch c { + case '(', '[', ')', ']', ',', '.', + '!', '>', '<', '=', '&', '|': + return true + default: + return unicode.IsSpace(c) + } +} + +// Next returns the next token or nil if the end of the expression is reached. +func (l *Lexer) Next() *Token { + // Skip whitespace + for l.index < len(l.expr) && unicode.IsSpace(rune(l.expr[l.index])) { + l.index++ + } + if l.index >= len(l.expr) { + return nil + } + + c := l.expr[l.index] + switch c { + case '(': + l.index++ + // Function call or logical grouping + if l.last != nil && l.last.Kind == TokenKindFunction { + return l.createToken(TokenKindStartParameters, "(") + } + if l.flags&FlagV1 != 0 { + // V1 does not support grouping – treat as unexpected + return l.createToken(TokenKindUnexpected, "(") + } + return l.createToken(TokenKindStartGroup, "(") + case '[': + l.index++ + return l.createToken(TokenKindStartIndex, "[") + case ')': + l.index++ + if len(l.stack) > 0 && l.stack[len(l.stack)-1] == TokenKindStartParameters { + return l.createToken(TokenKindEndParameters, ")") + } + return l.createToken(TokenKindEndGroup, ")") + case ']': + l.index++ + return l.createToken(TokenKindEndIndex, "]") + case ',': + l.index++ + return l.createToken(TokenKindSeparator, ",") + case '*': + l.index++ 
+ return l.createToken(TokenKindWildcard, "*") + case '\'': + return l.readString() + case '!', '>', '<', '=', '&', '|': + if l.flags&FlagV1 != 0 { + l.index++ + return l.createToken(TokenKindUnexpected, string(c)) + } + return l.readOperator() + default: + return l.defaultNext(c) + } +} + +func (l *Lexer) defaultNext(c byte) *Token { + if c == '.' { + // Could be number or dereference + if l.last == nil || l.last.Kind == TokenKindSeparator || l.last.Kind == TokenKindStartGroup || l.last.Kind == TokenKindStartIndex || l.last.Kind == TokenKindStartParameters || l.last.Kind == TokenKindLogicalOperator { + return l.readNumber() + } + l.index++ + return l.createToken(TokenKindDereference, ".") + } + if c == '-' || c == '+' || unicode.IsDigit(rune(c)) { + return l.readNumber() + } + return l.readKeyword() +} + +// Helper to create a token and update lexer state. +func (l *Lexer) createToken(kind TokenKind, raw string) *Token { + // Token order check + if !l.checkLastToken(kind, raw) { + // Illegal token sequence + return &Token{Kind: TokenKindUnexpected, Raw: raw, Index: l.index} + } + tok := &Token{Kind: kind, Raw: raw, Index: l.index} + l.last = tok + // Manage stack for grouping + switch kind { + case TokenKindStartGroup, TokenKindStartIndex, TokenKindStartParameters: + l.stack = append(l.stack, kind) + case TokenKindEndGroup, TokenKindEndIndex, TokenKindEndParameters: + if len(l.stack) > 0 { + l.stack = l.stack[:len(l.stack)-1] + } + } + return tok +} + +// nil last token represented by nil +func (l *Lexer) getLastKind() *TokenKind { + var lastKind *TokenKind + if l.last != nil { + lastKind = &l.last.Kind + } + return lastKind +} + +// checkLastToken verifies that the token sequence is legal based on the last token. 
+func (l *Lexer) checkLastToken(kind TokenKind, raw string) bool { + lastKind := l.getLastKind() + + // Helper to check if lastKind is in allowed list + allowed := func(allowedKinds ...TokenKind) bool { + return lastKind != nil && slices.Contains(allowedKinds, *lastKind) + } + // For nil last, we treat as no previous token + // Define allowed previous kinds for each token kind + switch kind { + case TokenKindStartGroup: + return lastKind == nil || allowed(TokenKindSeparator, TokenKindStartGroup, TokenKindStartParameters, TokenKindStartIndex, TokenKindLogicalOperator) + case TokenKindStartIndex: + return allowed(TokenKindEndGroup, TokenKindEndParameters, TokenKindEndIndex, TokenKindWildcard, TokenKindPropertyName, TokenKindNamedValue) + case TokenKindStartParameters: + return allowed(TokenKindFunction) + case TokenKindEndGroup: + return allowed(TokenKindEndGroup, TokenKindEndParameters, TokenKindEndIndex, TokenKindWildcard, TokenKindNull, TokenKindBoolean, TokenKindNumber, TokenKindString, TokenKindPropertyName, TokenKindNamedValue) + case TokenKindEndIndex: + return allowed(TokenKindEndGroup, TokenKindEndParameters, TokenKindEndIndex, TokenKindWildcard, TokenKindNull, TokenKindBoolean, TokenKindNumber, TokenKindString, TokenKindPropertyName, TokenKindNamedValue) + case TokenKindEndParameters: + return allowed(TokenKindStartParameters, TokenKindEndGroup, TokenKindEndParameters, TokenKindEndIndex, TokenKindWildcard, TokenKindNull, TokenKindBoolean, TokenKindNumber, TokenKindString, TokenKindPropertyName, TokenKindNamedValue) + case TokenKindSeparator: + return allowed(TokenKindEndGroup, TokenKindEndParameters, TokenKindEndIndex, TokenKindWildcard, TokenKindNull, TokenKindBoolean, TokenKindNumber, TokenKindString, TokenKindPropertyName, TokenKindNamedValue) + case TokenKindWildcard: + return allowed(TokenKindStartIndex, TokenKindDereference) + case TokenKindDereference: + return allowed(TokenKindEndGroup, TokenKindEndParameters, TokenKindEndIndex, TokenKindWildcard, 
TokenKindPropertyName, TokenKindNamedValue) + case TokenKindLogicalOperator: + if raw == "!" { // "!" + return lastKind == nil || allowed(TokenKindSeparator, TokenKindStartGroup, TokenKindStartParameters, TokenKindStartIndex, TokenKindLogicalOperator) + } + return allowed(TokenKindEndGroup, TokenKindEndParameters, TokenKindEndIndex, TokenKindWildcard, TokenKindNull, TokenKindBoolean, TokenKindNumber, TokenKindString, TokenKindPropertyName, TokenKindNamedValue) + case TokenKindNull, TokenKindBoolean, TokenKindNumber, TokenKindString: + return lastKind == nil || allowed(TokenKindSeparator, TokenKindStartIndex, TokenKindStartGroup, TokenKindStartParameters, TokenKindLogicalOperator) + case TokenKindPropertyName: + return allowed(TokenKindDereference) + case TokenKindFunction, TokenKindNamedValue: + return lastKind == nil || allowed(TokenKindSeparator, TokenKindStartIndex, TokenKindStartGroup, TokenKindStartParameters, TokenKindLogicalOperator) + default: + return true + } +} + +// readNumber parses a numeric literal. +func (l *Lexer) readNumber() *Token { + start := l.index + periods := 0 + for l.index < len(l.expr) { + ch := l.expr[l.index] + if ch == '.' { + periods++ + } + if testTokenBoundary(rune(ch)) && ch != '.' { + break + } + l.index++ + } + raw := l.expr[start:l.index] + if len(raw) > 2 { + switch raw[:2] { + case "0x", "0o": + tok := l.createToken(TokenKindNumber, raw) + if i, err := strconv.ParseInt(raw, 0, 32); err == nil { + tok.Value = float64(i) + return tok + } + } + } + // Try to parse as float64 + var val interface{} = raw + if f, err := strconv.ParseFloat(raw, 64); err == nil { + val = f + } + tok := l.createToken(TokenKindNumber, raw) + tok.Value = val + return tok +} + +// readString parses a single‑quoted string literal. 
+func (l *Lexer) readString() *Token { + start := l.index + l.index++ // skip opening quote + var sb strings.Builder + closed := false + for l.index < len(l.expr) { + ch := l.expr[l.index] + l.index++ + if ch == '\'' { + if l.index < len(l.expr) && l.expr[l.index] == '\'' { + // escaped quote + sb.WriteByte('\'') + l.index++ + continue + } + closed = true + break + } + sb.WriteByte(ch) + } + raw := l.expr[start:l.index] + tok := l.createToken(TokenKindString, raw) + if closed { + tok.Value = sb.String() + } else { + tok.Kind = TokenKindUnexpected + } + return tok +} + +// readOperator parses logical operators (==, !=, >, >=, etc.). +func (l *Lexer) readOperator() *Token { + start := l.index + l.index++ + if l.index < len(l.expr) { + two := l.expr[start : l.index+1] + switch two { + case "!=", ">=", "<=", "==", "&&", "||": + l.index++ + return l.createToken(TokenKindLogicalOperator, two) + } + } + ch := l.expr[start] + switch ch { + case '!', '>', '<': + return l.createToken(TokenKindLogicalOperator, string(ch)) + } + return l.createToken(TokenKindUnexpected, string(ch)) +} + +// readKeyword parses identifiers, booleans, null, etc. 
+func (l *Lexer) readKeyword() *Token { + start := l.index + for l.index < len(l.expr) && !unicode.IsSpace(rune(l.expr[l.index])) && !strings.ContainsRune("()[],.!<>==&|*", rune(l.expr[l.index])) { + l.index++ + } + raw := l.expr[start:l.index] + if l.last != nil && l.last.Kind == TokenKindDereference { + return l.createToken(TokenKindPropertyName, raw) + } + switch raw { + case "true": + tok := l.createToken(TokenKindBoolean, raw) + tok.Value = true + return tok + case "false": + tok := l.createToken(TokenKindBoolean, raw) + tok.Value = false + return tok + case "null": + return l.createToken(TokenKindNull, raw) + case "NaN": + tok := l.createToken(TokenKindNumber, raw) + tok.Value = math.NaN() + return tok + case "Infinity": + tok := l.createToken(TokenKindNumber, raw) + tok.Value = math.Inf(1) + return tok + } + if l.index < len(l.expr) && l.expr[l.index] == '(' { + return l.createToken(TokenKindFunction, raw) + } + return l.createToken(TokenKindNamedValue, raw) +} + +// Flag constants – only V1 is used for now. +const FlagV1 = 1 + +// UnclosedTokens returns the stack of unclosed start tokens. +func (l *Lexer) UnclosedTokens() []TokenKind { + return l.stack +} diff --git a/internal/expr/lexer_additional_test.go b/internal/expr/lexer_additional_test.go new file mode 100644 index 00000000..27478f16 --- /dev/null +++ b/internal/expr/lexer_additional_test.go @@ -0,0 +1,112 @@ +package workflow + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +// TestLexerMultiple runs a set of expressions through the lexer and +// verifies that the produced token kinds and values match expectations. 
+func TestLexerMultiple(t *testing.T) { + cases := []struct { + expr string + expected []TokenKind + values []interface{} // optional, nil if not checking values + }{ + { + expr: "github.event_name == 'push'", + expected: []TokenKind{ + TokenKindNamedValue, // github + TokenKindDereference, + TokenKindPropertyName, // event_name + TokenKindLogicalOperator, // == + TokenKindString, // 'push' + }, + }, + { + expr: "github.event_name == 'push' && github.ref == 'refs/heads/main'", + expected: []TokenKind{ + TokenKindNamedValue, TokenKindDereference, TokenKindPropertyName, TokenKindLogicalOperator, TokenKindString, + TokenKindLogicalOperator, // && + TokenKindNamedValue, TokenKindDereference, TokenKindPropertyName, TokenKindLogicalOperator, TokenKindString, + }, + }, + { + expr: "contains(github.ref, 'refs/heads/')", + expected: []TokenKind{ + TokenKindFunction, // contains + TokenKindStartParameters, + TokenKindNamedValue, TokenKindDereference, TokenKindPropertyName, // github.ref + TokenKindSeparator, + TokenKindString, + TokenKindEndParameters, + }, + }, + { + expr: "matrix[0].name", + expected: []TokenKind{ + TokenKindNamedValue, // matrix + TokenKindStartIndex, + TokenKindNumber, + TokenKindEndIndex, + TokenKindDereference, + TokenKindPropertyName, // name + }, + }, + { + expr: "github.*", + expected: []TokenKind{ + TokenKindNamedValue, TokenKindDereference, TokenKindWildcard, + }, + }, + { + expr: "null", + expected: []TokenKind{TokenKindNull}, + }, + { + expr: "true", + expected: []TokenKind{TokenKindBoolean}, + values: []interface{}{true}, + }, + { + expr: "123", + expected: []TokenKind{TokenKindNumber}, + values: []interface{}{123.0}, + }, + { + expr: "(a && b)", + expected: []TokenKind{TokenKindStartGroup, TokenKindNamedValue, TokenKindLogicalOperator, TokenKindNamedValue, TokenKindEndGroup}, + }, + { + expr: "[1,2]", // Syntax Error + expected: []TokenKind{TokenKindUnexpected, TokenKindNumber, TokenKindSeparator, TokenKindNumber, TokenKindEndIndex}, + }, + { 
+ expr: "'Hello i''s escaped'", + expected: []TokenKind{TokenKindString}, + values: []interface{}{"Hello i's escaped"}, + }, + } + + for _, tc := range cases { + lexer := NewLexer(tc.expr, 0) + var tokens []*Token + for { + tok := lexer.Next() + if tok == nil { + break + } + tokens = append(tokens, tok) + } + assert.Equal(t, len(tc.expected), len(tokens), "expression: %s", tc.expr) + for i, kind := range tc.expected { + assert.Equal(t, kind, tokens[i].Kind, "expr %s token %d", tc.expr, i) + } + if tc.values != nil { + for i, val := range tc.values { + assert.Equal(t, val, tokens[i].Value, "expr %s token %d value", tc.expr, i) + } + } + } +} diff --git a/internal/expr/lexer_test.go b/internal/expr/lexer_test.go new file mode 100644 index 00000000..39ec1d67 --- /dev/null +++ b/internal/expr/lexer_test.go @@ -0,0 +1,56 @@ +package workflow + +import ( + "math" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestLexer(t *testing.T) { + input := "github.event_name == 'push' && github.ref == 'refs/heads/main'" + lexer := NewLexer(input, 0) + var tokens []*Token + for { + tok := lexer.Next() + if tok == nil || tok.Kind == TokenKindUnexpected { + break + } + tokens = append(tokens, tok) + } + for i, tok := range tokens { + t.Logf("Token %d: Kind=%v, Value=%v", i, tok.Kind, tok.Value) + } + assert.Equal(t, tokens[1].Kind, TokenKindDereference) +} + +func TestLexerNumbers(t *testing.T) { + table := []struct { + in string + out interface{} + }{ + {"-Infinity", math.Inf(-1)}, + {"Infinity", math.Inf(1)}, + {"2.5", float64(2.5)}, + {"3.3", float64(3.3)}, + {"1", float64(1)}, + {"-1", float64(-1)}, + {"0x34", float64(0x34)}, + {"0o34", float64(0o34)}, + } + for _, cs := range table { + lexer := NewLexer(cs.in, 0) + var tokens []*Token + for { + tok := lexer.Next() + if tok == nil || tok.Kind == TokenKindUnexpected { + break + } + tokens = append(tokens, tok) + } + require.Len(t, tokens, 1) + assert.Equal(t, cs.out, 
tokens[0].Value) + assert.Equal(t, cs.in, tokens[0].Raw) + } +} diff --git a/internal/model/anchors.go b/internal/model/anchors.go new file mode 100644 index 00000000..e68c74a7 --- /dev/null +++ b/internal/model/anchors.go @@ -0,0 +1,30 @@ +package model + +import ( + "errors" + + "gopkg.in/yaml.v3" +) + +// Assumes there is no cycle ensured via test TestVerifyCycleIsInvalid +func resolveAliases(node *yaml.Node) error { + switch node.Kind { + case yaml.AliasNode: + aliasTarget := node.Alias + if aliasTarget == nil { + return errors.New("unresolved alias node") + } + *node = *aliasTarget + if err := resolveAliases(node); err != nil { + return err + } + + case yaml.DocumentNode, yaml.MappingNode, yaml.SequenceNode: + for _, child := range node.Content { + if err := resolveAliases(child); err != nil { + return err + } + } + } + return nil +} diff --git a/internal/model/strategy_utils.go b/internal/model/strategy_utils.go new file mode 100644 index 00000000..edcb81eb --- /dev/null +++ b/internal/model/strategy_utils.go @@ -0,0 +1,242 @@ +package model + +import ( + "errors" + "fmt" + "strings" + + "gopkg.in/yaml.v3" +) + +// TraceWriter is an interface for logging trace information. +// Implementations can write to console, file, or any other sink. +type TraceWriter interface { + Info(format string, args ...interface{}) +} + +// StrategyResult holds the result of expanding a strategy. +// FlatMatrix contains the expanded matrix entries. +// IncludeMatrix contains entries that were added via include. +// FailFast indicates whether the job should fail fast. +// MaxParallel is the maximum parallelism allowed. +// MatrixKeys is the set of keys present in the matrix. 
+type StrategyResult struct { + FlatMatrix []map[string]yaml.Node + IncludeMatrix []map[string]yaml.Node + FailFast bool + MaxParallel *float64 + MatrixKeys map[string]struct{} +} + +type strategyContext struct { + jobTraceWriter TraceWriter + failFast bool + maxParallel float64 + matrix map[string][]yaml.Node + + flatMatrix []map[string]yaml.Node + includeMatrix []map[string]yaml.Node + + include []yaml.Node + exclude []yaml.Node +} + +func (strategyContext *strategyContext) handleInclude() error { + // Handle include logic + if len(strategyContext.include) > 0 { + for _, incNode := range strategyContext.include { + if incNode.Kind != yaml.MappingNode { + return fmt.Errorf("include entry is not a mapping node") + } + incMap := make(map[string]yaml.Node) + for i := 0; i < len(incNode.Content); i += 2 { + keyNode := incNode.Content[i] + valNode := incNode.Content[i+1] + if keyNode.Kind != yaml.ScalarNode { + return fmt.Errorf("include key is not scalar") + } + incMap[keyNode.Value] = *valNode + } + matched := false + for _, row := range strategyContext.flatMatrix { + match := true + for k, v := range incMap { + if rv, ok := row[k]; ok && !nodesEqual(rv, v) { + match = false + break + } + } + if match { + matched = true + // Add missing keys + strategyContext.jobTraceWriter.Info("Add missing keys %v", incMap) + for k, v := range incMap { + if _, ok := row[k]; !ok { + row[k] = v + } + } + } + } + if !matched { + if strategyContext.jobTraceWriter != nil { + strategyContext.jobTraceWriter.Info("Append include entry %v", incMap) + } + strategyContext.includeMatrix = append(strategyContext.includeMatrix, incMap) + } + } + } + return nil +} + +func (strategyContext *strategyContext) handleExclude() error { + // Handle exclude logic + if len(strategyContext.exclude) > 0 { + for _, exNode := range strategyContext.exclude { + // exNode is expected to be a mapping node + if exNode.Kind != yaml.MappingNode { + return fmt.Errorf("exclude entry is not a mapping node") + } + // 
Convert mapping to map[string]yaml.Node + exMap := make(map[string]yaml.Node) + for i := 0; i < len(exNode.Content); i += 2 { + keyNode := exNode.Content[i] + valNode := exNode.Content[i+1] + if keyNode.Kind != yaml.ScalarNode { + return fmt.Errorf("exclude key is not scalar") + } + exMap[keyNode.Value] = *valNode + } + // Remove matching rows + filtered := []map[string]yaml.Node{} + for _, row := range strategyContext.flatMatrix { + match := true + for k, v := range exMap { + if rv, ok := row[k]; !ok || !nodesEqual(rv, v) { + match = false + break + } + } + if !match { + filtered = append(filtered, row) + } else if strategyContext.jobTraceWriter != nil { + strategyContext.jobTraceWriter.Info("Removing %v from matrix due to exclude entry %v", row, exMap) + } + } + strategyContext.flatMatrix = filtered + } + } + return nil +} + +// ExpandStrategy expands the given strategy into a flat matrix and include matrix. +// It mimics the behavior of the C# StrategyUtils. The strategy parameter is expected +// to be populated from a YAML mapping that follows the GitHub Actions strategy schema. 
+func ExpandStrategy(strategy *Strategy, jobTraceWriter TraceWriter) (*StrategyResult, error) { + if strategy == nil { + return &StrategyResult{FlatMatrix: []map[string]yaml.Node{{}}, IncludeMatrix: []map[string]yaml.Node{}, FailFast: true}, nil + } + + // Initialize defaults + strategyContext := &strategyContext{ + jobTraceWriter: jobTraceWriter, + failFast: strategy.FailFast, + maxParallel: strategy.MaxParallel, + matrix: strategy.Matrix, + flatMatrix: []map[string]yaml.Node{{}}, + } + // Process matrix entries + for key, values := range strategyContext.matrix { + switch key { + case "include": + strategyContext.include = values + case "exclude": + strategyContext.exclude = values + default: + // Other keys are treated as matrix dimensions + // Expand each existing row with the new key/value pairs + next := []map[string]yaml.Node{} + for _, row := range strategyContext.flatMatrix { + for _, val := range values { + newRow := make(map[string]yaml.Node) + for k, v := range row { + newRow[k] = v + } + newRow[key] = val + next = append(next, newRow) + } + } + strategyContext.flatMatrix = next + } + } + + if err := strategyContext.handleExclude(); err != nil { + return nil, err + } + + if len(strategyContext.flatMatrix) == 0 { + if jobTraceWriter != nil { + jobTraceWriter.Info("Matrix is empty, adding an empty entry") + } + strategyContext.flatMatrix = []map[string]yaml.Node{{}} + } + + // Enforce job matrix limit of github + if len(strategyContext.flatMatrix) > 256 { + if jobTraceWriter != nil { + jobTraceWriter.Info("Failure: Matrix contains more than 256 entries after exclude") + } + return nil, errors.New("matrix contains more than 256 entries") + } + + // Build matrix keys set + matrixKeys := make(map[string]struct{}) + if len(strategyContext.flatMatrix) > 0 { + for k := range strategyContext.flatMatrix[0] { + matrixKeys[k] = struct{}{} + } + } + + if err := strategyContext.handleInclude(); err != nil { + return nil, err + } + + return &StrategyResult{ + 
FlatMatrix: strategyContext.flatMatrix, + IncludeMatrix: strategyContext.includeMatrix, + FailFast: strategyContext.failFast, + MaxParallel: &strategyContext.maxParallel, + MatrixKeys: matrixKeys, + }, nil +} + +// nodesEqual compares two yaml.Node values for equality. +func nodesEqual(a, b yaml.Node) bool { + return DeepEquals(a, b, true) +} + +// GetDefaultDisplaySuffix returns a string like "(foo, bar, baz)". +// Empty items are ignored. If all items are empty the result is "". +func GetDefaultDisplaySuffix(items []string) string { + var b strings.Builder // efficient string concatenation + + first := true // true until we write the first non‑empty item + + for _, mk := range items { + if mk == "" { // Go has no null string, so we only need to check for empty + continue + } + if first { + b.WriteString("(") + first = false + } else { + b.WriteString(", ") + } + b.WriteString(mk) + } + + if !first { // we wrote at least one item + b.WriteString(")") + } + + return b.String() +} diff --git a/internal/model/strategy_utils_test.go b/internal/model/strategy_utils_test.go new file mode 100644 index 00000000..cb0c6ec3 --- /dev/null +++ b/internal/model/strategy_utils_test.go @@ -0,0 +1,68 @@ +package model + +import ( + "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" +) + +type EmptyTraceWriter struct { +} + +func (e *EmptyTraceWriter) Info(_ string, _ ...interface{}) { +} + +func TestStrategy(t *testing.T) { + table := []struct { + content string + flatmatrix int + includematrix int + }{ + {` +matrix: + label: + - a + - b + fields: + - a + - b +`, 4, 0}, + {` +matrix: + label: + - a + - b + include: + - label: a + x: self`, 2, 0, + }, + {` +matrix: + label: + - a + - b + include: + - label: c + x: self`, 2, 1, + }, + {` +matrix: + label: + - a + - b + exclude: + - label: a`, 1, 0, + }, + } + + for _, tc := range table { + var strategy Strategy + err := yaml.Unmarshal([]byte(tc.content), &strategy) + require.NoError(t, err) + res, err := 
ExpandStrategy(&strategy, &EmptyTraceWriter{}) + require.NoError(t, err) + require.Len(t, res.FlatMatrix, tc.flatmatrix) + require.Len(t, res.IncludeMatrix, tc.includematrix) + } +} diff --git a/internal/model/token_utils.go b/internal/model/token_utils.go new file mode 100644 index 00000000..dce92fb8 --- /dev/null +++ b/internal/model/token_utils.go @@ -0,0 +1,148 @@ +package model + +import ( + "strings" + + v2 "github.com/actions-oss/act-cli/internal/eval/v2" + "gopkg.in/yaml.v3" +) + +// DeepEquals compares two yaml.Node values recursively. +// It supports scalar, mapping and sequence nodes and allows +// an optional partial match for mappings and sequences. +func DeepEquals(a, b yaml.Node, partialMatch bool) bool { + // Scalar comparison + if a.Kind == yaml.ScalarNode && b.Kind == yaml.ScalarNode { + return scalarEquals(a, b) + } + + // Mapping comparison + if a.Kind == yaml.MappingNode && b.Kind == yaml.MappingNode { + return deepMapEquals(a, b, partialMatch) + } + + // Sequence comparison + if a.Kind == yaml.SequenceNode && b.Kind == yaml.SequenceNode { + return deepSequenceEquals(a, b, partialMatch) + } + + // Different kinds are not equal + return false +} + +func scalarEquals(a, b yaml.Node) bool { + var left, right any + return a.Decode(&left) == nil && b.Decode(&right) == nil && v2.CreateIntermediateResult(v2.NewEvaluationContext(), left).AbstractEqual(v2.CreateIntermediateResult(v2.NewEvaluationContext(), right)) +} + +func deepMapEquals(a, b yaml.Node, partialMatch bool) bool { + mapA := make(map[string]yaml.Node) + for i := 0; i < len(a.Content); i += 2 { + keyNode := a.Content[i] + valNode := a.Content[i+1] + if keyNode.Kind != yaml.ScalarNode { + return false + } + mapA[strings.ToLower(keyNode.Value)] = *valNode + } + mapB := make(map[string]yaml.Node) + for i := 0; i < len(b.Content); i += 2 { + keyNode := b.Content[i] + valNode := b.Content[i+1] + if keyNode.Kind != yaml.ScalarNode { + return false + } + mapB[strings.ToLower(keyNode.Value)] = 
*valNode + } + if partialMatch { + if len(mapA) < len(mapB) { + return false + } + } else { + if len(mapA) != len(mapB) { + return false + } + } + for k, vB := range mapB { + vA, ok := mapA[k] + if !ok || !DeepEquals(vA, vB, partialMatch) { + return false + } + } + return true +} + +func deepSequenceEquals(a, b yaml.Node, partialMatch bool) bool { + if partialMatch { + if len(a.Content) < len(b.Content) { + return false + } + } else { + if len(a.Content) != len(b.Content) { + return false + } + } + limit := len(b.Content) + if !partialMatch { + limit = len(a.Content) + } + for i := 0; i < limit; i++ { + if !DeepEquals(*a.Content[i], *b.Content[i], partialMatch) { + return false + } + } + return true +} + +// traverse walks a YAML node recursively. +func traverse(node *yaml.Node, omitKeys bool, result *[]*yaml.Node) { + if node == nil { + return + } + + *result = append(*result, node) + + switch node.Kind { + case yaml.MappingNode: + if omitKeys { + // node.Content: key0, val0, key1, val1, … + for i := 1; i < len(node.Content); i += 2 { // only the values + traverse(node.Content[i], omitKeys, result) + } + } else { + for _, child := range node.Content { + traverse(child, omitKeys, result) + } + } + case yaml.SequenceNode: + // For all other node kinds (Scalar, Sequence, Alias, etc.) 
+ for _, child := range node.Content { + traverse(child, omitKeys, result) + } + } +} + +// GetDisplayStrings implements the LINQ expression: +// +// from displayitem in keys.SelectMany(key => item[key].Traverse(true)) +// where !(displayitem is SequenceToken || displayitem is MappingToken) +// select displayitem.ToString() +func GetDisplayStrings(keys []string, item map[string]*yaml.Node) []string { + var res []string + + for _, k := range keys { + if node, ok := item[k]; ok { + var all []*yaml.Node + traverse(node, true, &all) // include the parent node itself + + for _, n := range all { + // Keep only scalars – everything else is dropped + if n.Kind == yaml.ScalarNode { + res = append(res, n.Value) + } + } + } + } + + return res +} diff --git a/internal/model/workflow_state.go b/internal/model/workflow_state.go new file mode 100644 index 00000000..d5451448 --- /dev/null +++ b/internal/model/workflow_state.go @@ -0,0 +1,277 @@ +package model + +import "gopkg.in/yaml.v3" + +type JobStatus int + +const ( + JobStatusPending JobStatus = iota + JobStatusDependenciesReady + JobStatusBlocked + JobStatusCompleted +) + +type JobState struct { + JobID string // Workflow path to job, incl matrix and parent jobids + Result string // Actions Job Result + Outputs map[string]string // Returned Outputs + State JobStatus + Strategy []MatrixJobState +} + +type MatrixJobState struct { + Matrix map[string]any + Name string + Result string + Outputs map[string]string // Returned Outputs + State JobStatus +} + +type WorkflowStatus int + +const ( + WorkflowStatusPending WorkflowStatus = iota + WorkflowStatusDependenciesReady + WorkflowStatusBlocked + WorkflowStatusCompleted +) + +type WorkflowState struct { + Name string + RunName string + Jobs JobState + StateWorkflowStatus WorkflowStatus +} + +type Workflow struct { + On *On `yaml:"on,omitempty"` + Name string `yaml:"name,omitempty"` + Description string `yaml:"description,omitempty"` + RunName yaml.Node `yaml:"run-name,omitempty"` + 
Permissions *Permissions `yaml:"permissions,omitempty"` + Env yaml.Node `yaml:"env,omitempty"` + Defaults yaml.Node `yaml:"defaults,omitempty"` + Concurrency yaml.Node `yaml:"concurrency,omitempty"` // Two layouts + Jobs map[string]Job `yaml:"jobs,omitempty"` +} + +type On struct { + Data map[string]yaml.Node `yaml:"-"` + WorkflowDispatch *WorkflowDispatch `yaml:"workflow_dispatch,omitempty"` + WorkflowCall *WorkflowCall `yaml:"workflow_call,omitempty"` + Schedule []Cron `yaml:"schedule,omitempty"` +} + +type Cron struct { + Cron string `yaml:"cron,omitempty"` +} + +func (a *On) UnmarshalYAML(node *yaml.Node) error { + switch node.Kind { + case yaml.ScalarNode: + var s string + if err := node.Decode(&s); err != nil { + return err + } + a.Data = map[string]yaml.Node{} + a.Data[s] = yaml.Node{} + case yaml.SequenceNode: + var s []string + if err := node.Decode(&s); err != nil { + return err + } + a.Data = map[string]yaml.Node{} + for _, v := range s { + a.Data[v] = yaml.Node{} + } + default: + if err := node.Decode(&a.Data); err != nil { + return err + } + type OnObj On + if err := node.Decode((*OnObj)(a)); err != nil { + return err + } + } + return nil +} + +func (a *On) MarshalYAML() (interface{}, error) { + return a.Data, nil +} + +var ( + _ yaml.Unmarshaler = &On{} + _ yaml.Marshaler = &On{} + _ yaml.Unmarshaler = &Concurrency{} + _ yaml.Unmarshaler = &RunsOn{} + _ yaml.Unmarshaler = &ImplicitStringArray{} + _ yaml.Unmarshaler = &Environment{} +) + +type WorkflowDispatch struct { + Inputs map[string]Input `yaml:"inputs,omitempty"` +} + +type Input struct { + Description string `yaml:"description,omitempty"` + Type string `yaml:"type,omitempty"` + Default string `yaml:"default,omitempty"` + Required bool `yaml:"required,omitempty"` +} + +type WorkflowCall struct { + Inputs map[string]Input `yaml:"inputs,omitempty"` + Secrets map[string]Secret `yaml:"secrets,omitempty"` + Outputs map[string]Output `yaml:"outputs,omitempty"` +} + +type Secret struct { + Description 
string `yaml:"description,omitempty"` + Required bool `yaml:"required,omitempty"` +} + +type Output struct { + Description string `yaml:"description,omitempty"` + Value yaml.Node `yaml:"value,omitempty"` +} + +type Job struct { + Needs ImplicitStringArray `yaml:"needs,omitempty"` + Permissions *Permissions `yaml:"permissions,omitempty"` + Strategy yaml.Node `yaml:"strategy,omitempty"` + Name yaml.Node `yaml:"name,omitempty"` + Concurrency yaml.Node `yaml:"concurrency,omitempty"` + // Reusable Workflow + Uses yaml.Node `yaml:"uses,omitempty"` + With yaml.Node `yaml:"with,omitempty"` + Secrets yaml.Node `yaml:"secrets,omitempty"` + // Runner Job + RunsOn yaml.Node `yaml:"runs-on,omitempty"` + Defaults yaml.Node `yaml:"defaults,omitempty"` + TimeoutMinutes yaml.Node `yaml:"timeout-minutes,omitempty"` + Container yaml.Node `yaml:"container,omitempty"` + Services yaml.Node `yaml:"services,omitempty"` + Env yaml.Node `yaml:"env,omitempty"` + Steps []yaml.Node `yaml:"steps,omitempty"` + Outputs yaml.Node `yaml:"outputs,omitempty"` +} + +type ImplicitStringArray []string + +func (a *ImplicitStringArray) UnmarshalYAML(node *yaml.Node) error { + if node.Kind == yaml.ScalarNode { + var s string + if err := node.Decode(&s); err != nil { + return err + } + *a = []string{s} + return nil + } + return node.Decode((*[]string)(a)) +} + +type Permissions map[string]string + +func (p *Permissions) UnmarshalYAML(node *yaml.Node) error { + if node.Kind == yaml.ScalarNode { + var s string + if err := node.Decode(&s); err != nil { + return err + } + var perm string + switch s { + case "read-all": + perm = "read" + case "write-all": + perm = "write" + default: + return nil + } + (*p)["actions"] = perm + (*p)["attestations"] = perm + (*p)["contents"] = perm + (*p)["checks"] = perm + (*p)["deployments"] = perm + (*p)["discussions"] = perm + (*p)["id-token"] = perm + (*p)["issues"] = perm + (*p)["models"] = perm + (*p)["packages"] = perm + (*p)["pages"] = perm + (*p)["pull-requests"] = perm + 
(*p)["repository-projects"] = perm + (*p)["security-events"] = perm + (*p)["statuses"] = perm + return nil + } + return node.Decode((*map[string]string)(p)) +} + +type Strategy struct { + Matrix map[string][]yaml.Node `yaml:"matrix"` + MaxParallel float64 `yaml:"max-parallel"` + FailFast bool `yaml:"fail-fast"` +} + +type Concurrency struct { + Group string `yaml:"group"` + CancelInProgress bool `yaml:"cancel-in-progress"` +} + +func (c *Concurrency) UnmarshalYAML(node *yaml.Node) error { + if node.Kind == yaml.ScalarNode { + var s string + if err := node.Decode(&s); err != nil { + return err + } + c.Group = s + return nil + } + type ConcurrencyObj Concurrency + return node.Decode((*ConcurrencyObj)(c)) +} + +type Environment struct { + Name string `yaml:"name"` + URL yaml.Node `yaml:"url"` +} + +func (e *Environment) UnmarshalYAML(node *yaml.Node) error { + if node.Kind == yaml.ScalarNode { + var s string + if err := node.Decode(&s); err != nil { + return err + } + e.Name = s + return nil + } + type EnvironmentObj Environment + return node.Decode((*EnvironmentObj)(e)) +} + +type RunsOn struct { + Labels []string `yaml:"labels"` + Group string `yaml:"group,omitempty"` +} + +func (a *RunsOn) UnmarshalYAML(node *yaml.Node) error { + if node.Kind == yaml.ScalarNode { + var s string + if err := node.Decode(&s); err != nil { + return err + } + a.Labels = []string{s} + return nil + } + if node.Kind == yaml.SequenceNode { + var s []string + if err := node.Decode(&s); err != nil { + return err + } + a.Labels = s + return nil + } + type RunsOnObj RunsOn + return node.Decode((*RunsOnObj)(a)) +} diff --git a/internal/model/workflow_state_test.go b/internal/model/workflow_state_test.go new file mode 100644 index 00000000..9cfe8eda --- /dev/null +++ b/internal/model/workflow_state_test.go @@ -0,0 +1,141 @@ +package model + +import ( + "context" + "testing" + + v2 "github.com/actions-oss/act-cli/internal/eval/v2" + "github.com/actions-oss/act-cli/internal/templateeval" + 
"github.com/actions-oss/act-cli/pkg/schema" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" +) + +func TestParseWorkflow(t *testing.T) { + ee := &templateeval.ExpressionEvaluator{ + EvaluationContext: v2.EvaluationContext{ + Variables: v2.CaseInsensitiveObject[any]{}, + Functions: v2.GetFunctions(), + }, + } + var node yaml.Node + err := yaml.Unmarshal([]byte(` +on: push +run-name: ${{ fromjson('{}') }} +jobs: + _: + name: ${{ github.ref_name }} + steps: + - run: echo Hello World + env: + TAG: ${{ env.global }} +`), &node) + require.NoError(t, err) + err = ee.EvaluateYamlNode(context.Background(), node.Content[0], &schema.Node{ + Definition: "workflow-root", + Schema: schema.GetWorkflowSchema(), + }) + require.NoError(t, err) + + ee.RestrictEval = true + ee.EvaluationContext.Variables = v2.CaseInsensitiveObject[any]{ + "github": v2.CaseInsensitiveObject[any]{ + "ref_name": "self", + }, + "vars": v2.CaseInsensitiveObject[any]{}, + "inputs": v2.CaseInsensitiveObject[any]{}, + } + + err = ee.EvaluateYamlNode(context.Background(), node.Content[0], &schema.Node{ + Definition: "workflow-root", + Schema: schema.GetWorkflowSchema(), + }) + require.Error(t, err) + var myw Workflow + require.NoError(t, node.Decode(&myw)) +} + +func TestParseWorkflowCall(t *testing.T) { + ee := &templateeval.ExpressionEvaluator{ + EvaluationContext: v2.EvaluationContext{ + Variables: v2.CaseInsensitiveObject[any]{}, + Functions: v2.GetFunctions(), + }, + } + var node yaml.Node + // jobs.test.outputs.test + err := yaml.Unmarshal([]byte(` +on: + workflow_call: + outputs: + test: + value: ${{ jobs.test.outputs.test }} # tojson(vars.raw) +run-name: ${{ github.ref_name }} +jobs: + _: + runs-on: ubuntu-latest + name: ${{ github.ref_name }} + steps: + - run: echo Hello World + env: + TAG: ${{ env.global }} +`), &node) + require.NoError(t, err) + require.NoError(t, resolveAliases(node.Content[0])) + require.NoError(t, (&schema.Node{ + Definition: 
"workflow-root", + Schema: schema.GetWorkflowSchema(), + }).UnmarshalYAML(node.Content[0])) + err = ee.EvaluateYamlNode(context.Background(), node.Content[0], &schema.Node{ + Definition: "workflow-root", + Schema: schema.GetWorkflowSchema(), + }) + require.NoError(t, err) + + var raw any + err = node.Content[0].Decode(&raw) + assert.NoError(t, err) + + ee.RestrictEval = true + ee.EvaluationContext.Variables = v2.CaseInsensitiveObject[any]{ + "github": v2.CaseInsensitiveObject[any]{ + "ref_name": "self", + }, + "vars": v2.CaseInsensitiveObject[any]{ + "raw": raw, + }, + "inputs": v2.CaseInsensitiveObject[any]{}, + "jobs": v2.CaseInsensitiveObject[any]{ + "test": v2.CaseInsensitiveObject[any]{ + "outputs": v2.CaseInsensitiveObject[any]{ + "test": "Hello World", + }, + }, + }, + } + + err = ee.EvaluateYamlNode(context.Background(), node.Content[0], &schema.Node{ + RestrictEval: true, + Definition: "workflow-root", + Schema: schema.GetWorkflowSchema(), + }) + require.NoError(t, err) + var myw Workflow + require.NoError(t, node.Decode(&myw)) + workflowCall := myw.On.WorkflowCall + if workflowCall != nil { + for _, out := range workflowCall.Outputs { + err = ee.EvaluateYamlNode(context.Background(), &out.Value, &schema.Node{ + RestrictEval: true, + Definition: "workflow-output-context", + Schema: schema.GetWorkflowSchema(), + }) + require.NoError(t, err) + require.Equal(t, "Hello World", out.Value.Value) + } + } + out, err := yaml.Marshal(&myw) + assert.NoError(t, err) + assert.NotEmpty(t, out) +} diff --git a/internal/templateeval/evaluate.go b/internal/templateeval/evaluate.go new file mode 100644 index 00000000..e96b2211 --- /dev/null +++ b/internal/templateeval/evaluate.go @@ -0,0 +1,195 @@ +package templateeval + +import ( + "context" + "fmt" + "regexp" + + v2 "github.com/actions-oss/act-cli/internal/eval/v2" + exprparser "github.com/actions-oss/act-cli/internal/expr" + "github.com/actions-oss/act-cli/pkg/schema" + "gopkg.in/yaml.v3" +) + +type ExpressionEvaluator 
struct { + RestrictEval bool + EvaluationContext v2.EvaluationContext +} + +func isImplExpr(snode *schema.Node) bool { + def := snode.Schema.GetDefinition(snode.Definition) + return def.String != nil && def.String.IsExpression +} + +func (ee ExpressionEvaluator) evaluateScalarYamlNode(_ context.Context, node *yaml.Node, snode *schema.Node) (*yaml.Node, error) { + var in string + if err := node.Decode(&in); err != nil { + return nil, err + } + expr, isExpr, err := rewriteSubExpression(in, false) + if err != nil { + return nil, err + } + if snode == nil || !isExpr && !isImplExpr(snode) || snode.Schema.GetDefinition(snode.Definition).String.IsExpression || ee.RestrictEval && node.Tag != "!!expr" { + return node, nil + } + parsed, err := exprparser.Parse(expr) + if err != nil { + return nil, err + } + canEvaluate := ee.canEvaluate(parsed, snode) + if !canEvaluate { + node.Tag = "!!expr" + return node, nil + } + + eval := v2.NewEvaluator(&ee.EvaluationContext) + res, err := eval.EvaluateRaw(expr) + if err != nil { + return nil, err + } + ret := &yaml.Node{} + if err := ret.Encode(res); err != nil { + return nil, err + } + ret.Line = node.Line + ret.Column = node.Column + // Finally check if we found a schema validation error + return ret, snode.UnmarshalYAML(ret) +} + +func (ee ExpressionEvaluator) canEvaluate(parsed exprparser.Node, snode *schema.Node) bool { + canEvaluate := true + for _, v := range snode.GetVariables() { + canEvaluate = canEvaluate && ee.EvaluationContext.Variables.Get(v) != nil + } + for _, v := range snode.GetFunctions() { + canEvaluate = canEvaluate && ee.EvaluationContext.Functions.Get(v.Name) != nil + } + exprparser.VisitNode(parsed, func(node exprparser.Node) { + switch el := node.(type) { + case *exprparser.FunctionNode: + canEvaluate = canEvaluate && ee.EvaluationContext.Functions.Get(el.Name) != nil + case *exprparser.ValueNode: + canEvaluate = canEvaluate && (el.Kind != exprparser.TokenKindNamedValue || 
ee.EvaluationContext.Variables.Get(el.Value.(string)) != nil) + } + }) + return canEvaluate +} + +func (ee ExpressionEvaluator) evaluateMappingYamlNode(ctx context.Context, node *yaml.Node, snode *schema.Node) (*yaml.Node, error) { + var ret *yaml.Node + // GitHub has this undocumented feature to merge maps, called insert directive + insertDirective := regexp.MustCompile(`\${{\s*insert\s*}}`) + for i := 0; i < len(node.Content)/2; i++ { + k := node.Content[i*2] + var sk string + shouldInsert := k.Decode(&sk) == nil && insertDirective.MatchString(sk) + changed := func() error { + if ret == nil { + ret = &yaml.Node{} + if err := ret.Encode(node); err != nil { + return err + } + ret.Content = ret.Content[:i*2] + } + return nil + } + var ek *yaml.Node + if !shouldInsert { + var err error + ek, err = ee.evaluateYamlNodeInternal(ctx, k, snode) + if err != nil { + return nil, err + } + if ek != nil { + if err := changed(); err != nil { + return nil, err + } + } else { + ek = k + } + } + v := node.Content[i*2+1] + ev, err := ee.evaluateYamlNodeInternal(ctx, v, snode.GetNestedNode(ek.Value)) + if err != nil { + return nil, err + } + if ev != nil { + if err := changed(); err != nil { + return nil, err + } + } else { + ev = v + } + // Merge the nested map of the insert directive + if shouldInsert { + if ev.Kind != yaml.MappingNode { + return nil, fmt.Errorf("failed to insert node %v into mapping %v unexpected type %v expected MappingNode", ev, node, ev.Kind) + } + if err := changed(); err != nil { + return nil, err + } + ret.Content = append(ret.Content, ev.Content...) 
+ } else if ret != nil { + ret.Content = append(ret.Content, ek, ev) + } + } + return ret, nil +} + +func (ee ExpressionEvaluator) evaluateSequenceYamlNode(ctx context.Context, node *yaml.Node, snode *schema.Node) (*yaml.Node, error) { + var ret *yaml.Node + for i := 0; i < len(node.Content); i++ { + v := node.Content[i] + // Preserve nested sequences + wasseq := v.Kind == yaml.SequenceNode + ev, err := ee.evaluateYamlNodeInternal(ctx, v, snode.GetNestedNode("*")) + if err != nil { + return nil, err + } + if ev != nil { + if ret == nil { + ret = &yaml.Node{} + if err := ret.Encode(node); err != nil { + return nil, err + } + ret.Content = ret.Content[:i] + } + // GitHub has this undocumented feature to merge sequences / arrays + // We have a nested sequence via evaluation, merge the arrays + if ev.Kind == yaml.SequenceNode && !wasseq { + ret.Content = append(ret.Content, ev.Content...) + } else { + ret.Content = append(ret.Content, ev) + } + } else if ret != nil { + ret.Content = append(ret.Content, v) + } + } + return ret, nil +} + +func (ee ExpressionEvaluator) evaluateYamlNodeInternal(ctx context.Context, node *yaml.Node, snode *schema.Node) (*yaml.Node, error) { + switch node.Kind { + case yaml.ScalarNode: + return ee.evaluateScalarYamlNode(ctx, node, snode) + case yaml.MappingNode: + return ee.evaluateMappingYamlNode(ctx, node, snode) + case yaml.SequenceNode: + return ee.evaluateSequenceYamlNode(ctx, node, snode) + default: + return nil, nil + } +} + +func (ee ExpressionEvaluator) EvaluateYamlNode(ctx context.Context, node *yaml.Node, snode *schema.Node) error { + ret, err := ee.evaluateYamlNodeInternal(ctx, node, snode) + if err != nil { + return err + } + if ret != nil { + return ret.Decode(node) + } + return nil +} diff --git a/internal/templateeval/evaluate_test.go b/internal/templateeval/evaluate_test.go new file mode 100644 index 00000000..3d69a30b --- /dev/null +++ b/internal/templateeval/evaluate_test.go @@ -0,0 +1,94 @@ +package templateeval + +import 
( + "context" + "testing" + + v2 "github.com/actions-oss/act-cli/internal/eval/v2" + "github.com/actions-oss/act-cli/pkg/schema" + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" +) + +func TestEval(t *testing.T) { + cases := []struct { + name string + yamlInput string + restrict bool + variables v2.CaseInsensitiveObject[any] + expectErr bool + }{ + { + name: "NoError", + yamlInput: `on: push +run-name: ${{ github.ref_name }} +jobs: + _: + name: ${{ github.ref_name }} + steps: + - run: echo Hello World + env: + TAG: ${{ env.global }}`, + restrict: false, + expectErr: false, + }, + { + name: "Error", + yamlInput: `on: push +run-name: ${{ fromjson('{}') }} +jobs: + _: + name: ${{ github.ref_name }} + steps: + - run: echo Hello World + env: + TAG: ${{ env.global }}`, + restrict: true, + variables: v2.CaseInsensitiveObject[any]{ + "github": v2.CaseInsensitiveObject[any]{ + "ref_name": "self", + }, + "vars": v2.CaseInsensitiveObject[any]{}, + "inputs": v2.CaseInsensitiveObject[any]{}, + }, + expectErr: true, + }, + } + + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + ee := &ExpressionEvaluator{ + EvaluationContext: v2.EvaluationContext{ + Variables: v2.CaseInsensitiveObject[any]{}, + Functions: v2.GetFunctions(), + }, + } + var node yaml.Node + err := yaml.Unmarshal([]byte(tc.yamlInput), &node) + require.NoError(t, err) + + err = ee.EvaluateYamlNode(context.Background(), node.Content[0], &schema.Node{ + Definition: "workflow-root", + Schema: schema.GetWorkflowSchema(), + }) + require.NoError(t, err) + + if tc.restrict { + ee.RestrictEval = true + } + if tc.variables != nil { + ee.EvaluationContext.Variables = tc.variables + } + + err = ee.EvaluateYamlNode(context.Background(), node.Content[0], &schema.Node{ + Definition: "workflow-root", + Schema: schema.GetWorkflowSchema(), + }) + if tc.expectErr { + require.Error(t, err) + } else { + require.NoError(t, err) + } + }) + } +} diff --git a/internal/templateeval/rewrite_subexpression.go 
// internal/templateeval/rewrite_subexpression.go

// escapeFormatString doubles every brace so the text survives the
// format() templating untouched.
func escapeFormatString(in string) string {
	return strings.NewReplacer("{", "{{", "}", "}}").Replace(in)
}

// rewriteSubExpression turns a string with embedded ${{ ... }} expressions
// into a single expression. A string without "${{" is returned unchanged with
// isExpr false. A string that is exactly one expression returns that
// expression verbatim (unless forceFormat is set); otherwise the pieces are
// combined into a format('...', expr0, expr1, ...) call. Single-quoted string
// literals inside expressions (with '' as the escape) are skipped so that
// "}}" or "'" inside them does not terminate the scan.
func rewriteSubExpression(in string, forceFormat bool) (result string, isExpr bool, err error) {
	// missing closing pair is an error
	if !strings.Contains(in, "${{") {
		return in, false, nil
	}

	closingQuote := regexp.MustCompile("(?:''|[^'])*'")

	var (
		exprs  []string        // collected expression bodies
		format strings.Builder // format template with {i} placeholders
		pos    int             // scan position in `in`
		exprAt = -1            // start of the current expression body, -1 outside
		strAt  = -1            // >-1 while inside a single-quoted literal
	)
	for pos < len(in) {
		switch {
		case strAt > -1:
			// Inside a string literal: skip to the closing quote ('' escapes).
			m := closingQuote.FindStringIndex(in[pos:])
			if m == nil {
				return "", false, fmt.Errorf("unclosed string at position %d in %s", pos, in)
			}
			strAt = -1
			pos += m[1]
		case exprAt > -1:
			// Inside an expression: find whichever comes first, "}}" or "'".
			end := strings.Index(in[pos:], "}}")
			strAt = strings.Index(in[pos:], "'")
			if end > -1 && strAt > -1 {
				if end < strAt {
					strAt = -1
				} else {
					end = -1
				}
			}
			switch {
			case end > -1:
				fmt.Fprintf(&format, "{%d}", len(exprs))
				exprs = append(exprs, strings.TrimSpace(in[exprAt:pos+end]))
				pos += end + 2
				exprAt = -1
			case strAt > -1:
				pos += strAt + 1
			default:
				return "", false, fmt.Errorf("unclosed expression at position %d in %s", pos, in)
			}
		default:
			// Plain text: copy (brace-escaped) up to the next "${{".
			if open := strings.Index(in[pos:], "${{"); open != -1 {
				format.WriteString(escapeFormatString(in[pos : pos+open]))
				exprAt = pos + open + 3
				pos = exprAt
			} else {
				format.WriteString(escapeFormatString(in[pos:]))
				pos = len(in)
			}
		}
	}

	// A lone expression needs no format() wrapper.
	if len(exprs) == 1 && format.String() == "{0}" && !forceFormat {
		return exprs[0], true, nil
	}

	out := fmt.Sprintf("format('%s', %s)", strings.ReplaceAll(format.String(), "'", "''"), strings.Join(exprs, ", "))
	return out, true, nil
}

// internal/templateeval/rewrite_subexpression_test.go

// TestRewriteSubExpression covers the success cases of rewriteSubExpression.
func TestRewriteSubExpression(t *testing.T) {
	cases := []struct {
		name        string
		in          string
		forceFormat bool
		want        string
		wantExpr    bool
	}{
		{"NoExpression", "Hello world", false, "Hello world", false},
		{"SingleExpression", "Hello ${{ 'world' }}", false, "format('Hello {0}', 'world')", true},
		{"MultipleExpressions", "Hello ${{ 'world' }}, you are ${{ 'awesome' }}", false, "format('Hello {0}, you are {1}', 'world', 'awesome')", true},
		{"ForceFormatSingle", "Hello ${{ 'world' }}", true, "format('Hello {0}', 'world')", true},
		{"ForceFormatMultiple", "Hello ${{ 'world' }}, you are ${{ 'awesome' }}", true, "format('Hello {0}, you are {1}', 'world', 'awesome')", true},
		// Two single quotes represent an escaped quote inside a string.
		{"EscapedStringLiteral", "Hello ${{ 'It''s a test' }}", false, "format('Hello {0}', 'It''s a test')", true},
		{"ExpressionAtEnd", "Hello ${{ 'world' }}", false, "format('Hello {0}', 'world')", true},
		{"ExpressionNotAtEnd", "Hello ${{ 'world' }}, how are you?", false, "format('Hello {0}, how are you?', 'world')", true},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			out, isExpr, err := rewriteSubExpression(tc.in, tc.forceFormat)
			if err != nil {
				t.Fatalf("unexpected error: %v", err)
			}
			if isExpr != tc.wantExpr {
				t.Fatalf("isExpr = %v, want %v", isExpr, tc.wantExpr)
			}
			if out != tc.want {
				t.Fatalf("got %q, want %q", out, tc.want)
			}
		})
	}
}

// TestRewriteSubExpressionErrors covers the failure cases.
func TestRewriteSubExpressionErrors(t *testing.T) {
	errCases := []struct {
		name    string
		in      string // malformed input
		wantMsg string // substring expected in the error
	}{
		{"UnclosedExpression", "Hello ${{ 'world' ", "unclosed expression"},
		{"UnclosedString", "Hello ${{ 'world }}, you are ${{ 'awesome' }}", "unclosed string"},
	}
	for _, tc := range errCases {
		t.Run(tc.name, func(t *testing.T) {
			_, _, err := rewriteSubExpression(tc.in, false)
			if err == nil || !strings.Contains(err.Error(), tc.wantMsg) {
				t.Fatalf("err = %v, want message containing %q", err, tc.wantMsg)
			}
		})
	}
}
format(str reflect.Value, replaceValue ...reflect.Value) (string, error) { - input := impl.coerceToString(str).String() - output := "" - replacementIndex := "" - - state := passThrough - for _, character := range input { - switch state { - case passThrough: // normal buffer output - switch character { - case '{': - state = bracketOpen - - case '}': - state = bracketClose - - default: - output += string(character) - } - - case bracketOpen: // found { - switch character { - case '{': - output += "{" - replacementIndex = "" - state = passThrough - - case '}': - index, err := strconv.ParseInt(replacementIndex, 10, 32) - if err != nil { - return "", fmt.Errorf("the following format string is invalid: '%s'", input) - } - - replacementIndex = "" - - if len(replaceValue) <= int(index) { - return "", fmt.Errorf("the following format string references more arguments than were supplied: '%s'", input) - } - - output += impl.coerceToString(replaceValue[index]).String() - - state = passThrough - - default: - replacementIndex += string(character) - } - - case bracketClose: // found } - switch character { - case '}': - output += "}" - replacementIndex = "" - state = passThrough - - default: - panic("Invalid format parser state") - } - } - } - - if state != passThrough { - switch state { - case bracketOpen: - return "", fmt.Errorf("unclosed brackets. The following format string is invalid: '%s'", input) - - case bracketClose: - return "", fmt.Errorf("closing bracket without opening one. 
The following format string is invalid: '%s'", input) - } - } - - return output, nil -} - -func (impl *interperterImpl) join(array reflect.Value, sep reflect.Value) (string, error) { - separator := impl.coerceToString(sep).String() - switch array.Kind() { - case reflect.Slice: - var items []string - for i := 0; i < array.Len(); i++ { - items = append(items, impl.coerceToString(array.Index(i).Elem()).String()) - } - - return strings.Join(items, separator), nil - default: - return strings.Join([]string{impl.coerceToString(array).String()}, separator), nil - } -} - -func (impl *interperterImpl) toJSON(value reflect.Value) (string, error) { - if value.Kind() == reflect.Invalid { - return "null", nil - } - - json, err := json.MarshalIndent(value.Interface(), "", " ") - if err != nil { - return "", fmt.Errorf("cannot convert value to JSON. Cause: %v", err) - } - - return string(json), nil -} - -func (impl *interperterImpl) fromJSON(value reflect.Value) (interface{}, error) { - if value.Kind() != reflect.String { - return nil, fmt.Errorf("cannot parse non-string type %v as JSON", value.Kind()) - } - - var data interface{} - - err := json.Unmarshal([]byte(value.String()), &data) - if err != nil { - return nil, fmt.Errorf("invalid JSON: %v", err) - } - - return data, nil -} - -func (impl *interperterImpl) hashFiles(paths ...reflect.Value) (string, error) { - var ps []gitignore.Pattern - - const cwdPrefix = "." + string(filepath.Separator) - const excludeCwdPrefix = "!" + cwdPrefix - for _, path := range paths { - if path.Kind() == reflect.String { - cleanPath := path.String() - if strings.HasPrefix(cleanPath, cwdPrefix) { - cleanPath = cleanPath[len(cwdPrefix):] - } else if strings.HasPrefix(cleanPath, excludeCwdPrefix) { - cleanPath = "!" 
+ cleanPath[len(excludeCwdPrefix):] - } - ps = append(ps, gitignore.ParsePattern(cleanPath, nil)) - } else { - return "", fmt.Errorf("non-string path passed to hashFiles") - } - } - - matcher := gitignore.NewMatcher(ps) - - var files []string - if err := filepath.Walk(impl.config.WorkingDir, func(path string, fi fs.FileInfo, err error) error { - if err != nil { - return err - } - sansPrefix := strings.TrimPrefix(path, impl.config.WorkingDir+string(filepath.Separator)) - parts := strings.Split(sansPrefix, string(filepath.Separator)) - if fi.IsDir() || !matcher.Match(parts, fi.IsDir()) { - return nil - } - files = append(files, path) - return nil - }); err != nil { - return "", fmt.Errorf("unable to filepath.Walk: %v", err) - } - - if len(files) == 0 { - return "", nil - } - - hasher := sha256.New() - - for _, file := range files { - f, err := os.Open(file) - if err != nil { - return "", fmt.Errorf("unable to os.Open: %v", err) - } - - if _, err := io.Copy(hasher, f); err != nil { - return "", fmt.Errorf("unable to io.Copy: %v", err) - } - - if err := f.Close(); err != nil { - return "", fmt.Errorf("unable to Close file: %v", err) - } - } - - return hex.EncodeToString(hasher.Sum(nil)), nil -} +import "github.com/actions-oss/act-cli/pkg/model" func (impl *interperterImpl) getNeedsTransitive(job *model.Job) []string { needs := job.Needs() @@ -252,11 +13,11 @@ func (impl *interperterImpl) getNeedsTransitive(job *model.Job) []string { return needs } -func (impl *interperterImpl) always() (bool, error) { +func (impl *interperterImpl) always() (interface{}, error) { return true, nil } -func (impl *interperterImpl) jobSuccess() (bool, error) { +func (impl *interperterImpl) jobSuccess() (interface{}, error) { jobs := impl.config.Run.Workflow.Jobs jobNeeds := impl.getNeedsTransitive(impl.config.Run.Job()) @@ -269,11 +30,11 @@ func (impl *interperterImpl) jobSuccess() (bool, error) { return true, nil } -func (impl *interperterImpl) stepSuccess() (bool, error) { +func (impl 
*interperterImpl) stepSuccess() (interface{}, error) { return impl.env.Job.Status == "success", nil } -func (impl *interperterImpl) jobFailure() (bool, error) { +func (impl *interperterImpl) jobFailure() (interface{}, error) { jobs := impl.config.Run.Workflow.Jobs jobNeeds := impl.getNeedsTransitive(impl.config.Run.Job()) @@ -286,10 +47,10 @@ func (impl *interperterImpl) jobFailure() (bool, error) { return false, nil } -func (impl *interperterImpl) stepFailure() (bool, error) { +func (impl *interperterImpl) stepFailure() (interface{}, error) { return impl.env.Job.Status == "failure", nil } -func (impl *interperterImpl) cancelled() (bool, error) { +func (impl *interperterImpl) cancelled() (interface{}, error) { return impl.env.Job.Status == "cancelled", nil } diff --git a/pkg/exprparser/functions_test.go b/pkg/exprparser/functions_test.go deleted file mode 100644 index 063238b8..00000000 --- a/pkg/exprparser/functions_test.go +++ /dev/null @@ -1,278 +0,0 @@ -package exprparser - -import ( - "path/filepath" - "testing" - - "github.com/actions-oss/act-cli/pkg/model" - "github.com/stretchr/testify/assert" -) - -func TestFunctionContains(t *testing.T) { - table := []struct { - input string - expected interface{} - name string - }{ - {"contains('search', 'item') }}", false, "contains-str-str"}, - {`cOnTaInS('Hello', 'll') }}`, true, "contains-str-casing"}, - {`contains('HELLO', 'll') }}`, true, "contains-str-casing"}, - {`contains('3.141592', 3.14) }}`, true, "contains-str-number"}, - {`contains(3.141592, '3.14') }}`, true, "contains-number-str"}, - {`contains(3.141592, 3.14) }}`, true, "contains-number-number"}, - {`contains(true, 'u') }}`, true, "contains-bool-str"}, - {`contains(null, '') }}`, true, "contains-null-str"}, - {`contains(fromJSON('["first","second"]'), 'first') }}`, true, "contains-item"}, - {`contains(fromJSON('[null,"second"]'), '') }}`, true, "contains-item-null-empty-str"}, - {`contains(fromJSON('["","second"]'), null) }}`, true, 
"contains-item-empty-str-null"}, - {`contains(fromJSON('[true,"second"]'), 'true') }}`, false, "contains-item-bool-arr"}, - {`contains(fromJSON('["true","second"]'), true) }}`, false, "contains-item-str-bool"}, - {`contains(fromJSON('[3.14,"second"]'), '3.14') }}`, true, "contains-item-number-str"}, - {`contains(fromJSON('[3.14,"second"]'), 3.14) }}`, true, "contains-item-number-number"}, - {`contains(fromJSON('["","second"]'), fromJSON('[]')) }}`, false, "contains-item-str-arr"}, - {`contains(fromJSON('["","second"]'), fromJSON('{}')) }}`, false, "contains-item-str-obj"}, - {`contains(fromJSON('[{ "first": { "result": "success" }},{ "second": { "result": "success" }}]').first.result, 'success') }}`, true, "multiple-contains-item"}, - {`contains(fromJSON('[{ "result": "success" },{ "result": "failure" }]').*.result, 'failure') }}`, true, "multiple-contains-dereferenced-failure-item"}, - {`contains(fromJSON('[{ "result": "failure" },{ "result": "success" }]').*.result, 'success') }}`, true, "multiple-contains-dereferenced-success-item"}, - {`contains(fromJSON('[{ "result": "failure" },{ "result": "success" }]').*.result, 'notthere') }}`, false, "multiple-contains-dereferenced-missing-item"}, - {`contains(fromJSON('[{ "result": "failure", "outputs": { "key": "val1" } },{ "result": "success", "outputs": { "key": "val2" } }]').*.outputs.key, 'val1') }}`, true, "multiple-contains-dereferenced-output-item"}, - {`contains(fromJSON('[{ "result": "failure", "outputs": { "key": "val1" } },{ "result": "success", "outputs": { "key": "val2" } }]').*.outputs.key, 'val2') }}`, true, "multiple-contains-dereferenced-output-item-2"}, - {`contains(fromJSON('[{ "result": "failure", "outputs": { "key": "val1" } },{ "result": "success", "outputs": { "key": "val2" } }]').*.outputs.key, 'missing') }}`, false, "multiple-contains-dereferenced-output-misssing-item"}, - } - - env := &EvaluationEnvironment{} - - for _, tt := range table { - t.Run(tt.name, func(t *testing.T) { - output, err := 
NewInterpeter(env, Config{}).Evaluate(tt.input, DefaultStatusCheckNone) - assert.Nil(t, err) - - assert.Equal(t, tt.expected, output) - }) - } -} - -func TestFunctionStartsWith(t *testing.T) { - table := []struct { - input string - expected interface{} - name string - }{ - {"startsWith('search', 'se') }}", true, "startswith-string"}, - {"startsWith('search', 'sa') }}", false, "startswith-string"}, - {"startsWith('123search', '123s') }}", true, "startswith-string"}, - {"startsWith(123, 's') }}", false, "startswith-string"}, - {"startsWith(123, '12') }}", true, "startswith-string"}, - {"startsWith('123', 12) }}", true, "startswith-string"}, - {"startsWith(null, '42') }}", false, "startswith-string"}, - {"startsWith('null', null) }}", true, "startswith-string"}, - {"startsWith('null', '') }}", true, "startswith-string"}, - } - - env := &EvaluationEnvironment{} - - for _, tt := range table { - t.Run(tt.name, func(t *testing.T) { - output, err := NewInterpeter(env, Config{}).Evaluate(tt.input, DefaultStatusCheckNone) - assert.Nil(t, err) - - assert.Equal(t, tt.expected, output) - }) - } -} - -func TestFunctionEndsWith(t *testing.T) { - table := []struct { - input string - expected interface{} - name string - }{ - {"endsWith('search', 'ch') }}", true, "endsWith-string"}, - {"endsWith('search', 'sa') }}", false, "endsWith-string"}, - {"endsWith('search123s', '123s') }}", true, "endsWith-string"}, - {"endsWith(123, 's') }}", false, "endsWith-string"}, - {"endsWith(123, '23') }}", true, "endsWith-string"}, - {"endsWith('123', 23) }}", true, "endsWith-string"}, - {"endsWith(null, '42') }}", false, "endsWith-string"}, - {"endsWith('null', null) }}", true, "endsWith-string"}, - {"endsWith('null', '') }}", true, "endsWith-string"}, - } - - env := &EvaluationEnvironment{} - - for _, tt := range table { - t.Run(tt.name, func(t *testing.T) { - output, err := NewInterpeter(env, Config{}).Evaluate(tt.input, DefaultStatusCheckNone) - assert.Nil(t, err) - - assert.Equal(t, 
tt.expected, output) - }) - } -} - -func TestFunctionJoin(t *testing.T) { - table := []struct { - input string - expected interface{} - name string - }{ - {"join(fromJSON('[\"a\", \"b\"]'), ',')", "a,b", "join-arr"}, - {"join('string', ',')", "string", "join-str"}, - {"join(1, ',')", "1", "join-number"}, - {"join(null, ',')", "", "join-number"}, - {"join(fromJSON('[\"a\", \"b\", null]'), null)", "ab", "join-number"}, - {"join(fromJSON('[\"a\", \"b\"]'))", "a,b", "join-number"}, - {"join(fromJSON('[\"a\", \"b\", null]'), 1)", "a1b1", "join-number"}, - } - - env := &EvaluationEnvironment{} - - for _, tt := range table { - t.Run(tt.name, func(t *testing.T) { - output, err := NewInterpeter(env, Config{}).Evaluate(tt.input, DefaultStatusCheckNone) - assert.Nil(t, err) - - assert.Equal(t, tt.expected, output) - }) - } -} - -func TestFunctionToJSON(t *testing.T) { - table := []struct { - input string - expected interface{} - name string - }{ - {"toJSON(env) }}", "{\n \"key\": \"value\"\n}", "toJSON"}, - {"toJSON(null)", "null", "toJSON-null"}, - } - - env := &EvaluationEnvironment{ - Env: map[string]string{ - "key": "value", - }, - } - - for _, tt := range table { - t.Run(tt.name, func(t *testing.T) { - output, err := NewInterpeter(env, Config{}).Evaluate(tt.input, DefaultStatusCheckNone) - assert.Nil(t, err) - - assert.Equal(t, tt.expected, output) - }) - } -} - -func TestFunctionFromJSON(t *testing.T) { - table := []struct { - input string - expected interface{} - name string - }{ - {"fromJSON('{\"foo\":\"bar\"}') }}", map[string]interface{}{ - "foo": "bar", - }, "fromJSON"}, - } - - env := &EvaluationEnvironment{} - - for _, tt := range table { - t.Run(tt.name, func(t *testing.T) { - output, err := NewInterpeter(env, Config{}).Evaluate(tt.input, DefaultStatusCheckNone) - assert.Nil(t, err) - - assert.Equal(t, tt.expected, output) - }) - } -} - -func TestFunctionHashFiles(t *testing.T) { - table := []struct { - input string - expected interface{} - name string - }{ - 
{"hashFiles('**/non-extant-files') }}", "", "hash-non-existing-file"}, - {"hashFiles('**/non-extant-files', '**/more-non-extant-files') }}", "", "hash-multiple-non-existing-files"}, - {"hashFiles('./for-hashing-1.txt') }}", "66a045b452102c59d840ec097d59d9467e13a3f34f6494e539ffd32c1bb35f18", "hash-single-file"}, - {"hashFiles('./for-hashing-*.txt') }}", "8e5935e7e13368cd9688fe8f48a0955293676a021562582c7e848dafe13fb046", "hash-multiple-files"}, - {"hashFiles('./for-hashing-*.txt', '!./for-hashing-2.txt') }}", "66a045b452102c59d840ec097d59d9467e13a3f34f6494e539ffd32c1bb35f18", "hash-negative-pattern"}, - {"hashFiles('./for-hashing-**') }}", "c418ba693753c84115ced0da77f876cddc662b9054f4b129b90f822597ee2f94", "hash-multiple-files-and-directories"}, - {"hashFiles('./for-hashing-3/**') }}", "6f5696b546a7a9d6d42a449dc9a56bef244aaa826601ef27466168846139d2c2", "hash-nested-directories"}, - {"hashFiles('./for-hashing-3/**/nested-data.txt') }}", "8ecadfb49f7f978d0a9f3a957e9c8da6cc9ab871f5203b5d9f9d1dc87d8af18c", "hash-nested-directories-2"}, - } - - env := &EvaluationEnvironment{} - - for _, tt := range table { - t.Run(tt.name, func(t *testing.T) { - workdir, err := filepath.Abs("testdata") - assert.Nil(t, err) - output, err := NewInterpeter(env, Config{WorkingDir: workdir}).Evaluate(tt.input, DefaultStatusCheckNone) - assert.Nil(t, err) - - assert.Equal(t, tt.expected, output) - }) - } -} - -func TestFunctionFormat(t *testing.T) { - table := []struct { - input string - expected interface{} - error interface{} - name string - }{ - {"format('text')", "text", nil, "format-plain-string"}, - {"format('Hello {0} {1} {2}!', 'Mona', 'the', 'Octocat')", "Hello Mona the Octocat!", nil, "format-with-placeholders"}, - {"format('{{Hello {0} {1} {2}!}}', 'Mona', 'the', 'Octocat')", "{Hello Mona the Octocat!}", nil, "format-with-escaped-braces"}, - {"format('{{0}}', 'test')", "{0}", nil, "format-with-escaped-braces"}, - {"format('{{{0}}}', 'test')", "{test}", nil, 
"format-with-escaped-braces-and-value"}, - {"format('}}')", "}", nil, "format-output-closing-brace"}, - {`format('Hello "{0}" {1} {2} {3} {4}', null, true, -3.14, NaN, Infinity)`, `Hello "" true -3.14 NaN Infinity`, nil, "format-with-primitives"}, - {`format('Hello "{0}" {1} {2}', fromJSON('[0, true, "abc"]'), fromJSON('[{"a":1}]'), fromJSON('{"a":{"b":1}}'))`, `Hello "Array" Array Object`, nil, "format-with-complex-types"}, - {"format(true)", "true", nil, "format-with-primitive-args"}, - {"format('echo Hello {0} ${{Test}}', github.undefined_property)", "echo Hello ${Test}", nil, "format-with-undefined-value"}, - {"format('{0}}', '{1}', 'World')", nil, "closing bracket without opening one. The following format string is invalid: '{0}}'", "format-invalid-format-string"}, - {"format('{0', '{1}', 'World')", nil, "unclosed brackets. The following format string is invalid: '{0'", "format-invalid-format-string"}, - {"format('{2}', '{1}', 'World')", "", "the following format string references more arguments than were supplied: '{2}'", "format-invalid-replacement-reference"}, - {"format('{2147483648}')", "", "the following format string is invalid: '{2147483648}'", "format-invalid-replacement-reference"}, - {"format('{0} {1} {2} {3}', 1.0, 1.1, 1234567890.0, 12345678901234567890.0)", "1 1.1 1234567890 1.23456789012346E+19", nil, "format-floats"}, - } - - env := &EvaluationEnvironment{ - Github: &model.GithubContext{}, - } - - for _, tt := range table { - t.Run(tt.name, func(t *testing.T) { - output, err := NewInterpeter(env, Config{}).Evaluate(tt.input, DefaultStatusCheckNone) - if tt.error != nil { - assert.Equal(t, tt.error, err.Error()) - } else { - assert.Nil(t, err) - assert.Equal(t, tt.expected, output) - } - }) - } -} - -func TestMapContains(t *testing.T) { - env := &EvaluationEnvironment{ - Needs: map[string]Needs{ - "first-job": { - Outputs: map[string]string{}, - Result: "success", - }, - "second-job": { - Outputs: map[string]string{}, - Result: "failure", - }, - 
}, - } - - output, err := NewInterpeter(env, Config{}).Evaluate("contains(needs.*.result, 'failure')", DefaultStatusCheckNone) - assert.Nil(t, err) - - assert.Equal(t, true, output) -} diff --git a/pkg/exprparser/interpreter.go b/pkg/exprparser/interpreter.go index c99fccaf..e12c48f7 100644 --- a/pkg/exprparser/interpreter.go +++ b/pkg/exprparser/interpreter.go @@ -2,14 +2,14 @@ package exprparser import ( "encoding" - "encoding/json" "fmt" "math" "reflect" "strings" + eval "github.com/actions-oss/act-cli/internal/eval/v2" + exprparser "github.com/actions-oss/act-cli/internal/expr" "github.com/actions-oss/act-cli/pkg/model" - "github.com/rhysd/actionlint" ) type EvaluationEnvironment struct { @@ -83,22 +83,109 @@ func NewInterpeter(env *EvaluationEnvironment, config Config) Interpreter { } } +func toRawObj(left reflect.Value) map[string]any { + res, _ := toRaw(left).(map[string]any) + return res +} + +func toRaw(left reflect.Value) any { + if left.IsZero() { + return nil + } + switch left.Kind() { + case reflect.Pointer: + if left.IsNil() { + return nil + } + return toRaw(left.Elem()) + case reflect.Map: + iter := left.MapRange() + + m := map[string]any{} + + for iter.Next() { + key := iter.Key() + + if key.Kind() == reflect.String { + nv := toRaw(iter.Value()) + if nv != nil { + m[key.String()] = nv + } + } + } + if len(m) == 0 { + return nil + } + return m + case reflect.Struct: + m := map[string]any{} + + leftType := left.Type() + for i := 0; i < leftType.NumField(); i++ { + var name string + if jsonName := leftType.Field(i).Tag.Get("json"); jsonName != "" { + name, _, _ = strings.Cut(jsonName, ",") + } + if name == "" { + name = leftType.Field(i).Name + } + v := left.Field(i).Interface() + if t, ok := v.(encoding.TextMarshaler); ok { + text, _ := t.MarshalText() + if len(text) > 0 { + m[name] = string(text) + } + } else { + nv := toRaw(left.Field(i)) + if nv != nil { + m[name] = nv + } + } + } + + return m + } + return left.Interface() +} + +// All values are 
evaluated as string, funcs that takes objects are implemented elsewhere +type externalFunc struct { + f func([]reflect.Value) (interface{}, error) +} + +func (e externalFunc) Evaluate(ev *eval.Evaluator, args []exprparser.Node) (*eval.EvaluationResult, error) { + rargs := []reflect.Value{} + for _, arg := range args { + res, err := ev.Evaluate(arg) + if err != nil { + return nil, err + } + rargs = append(rargs, reflect.ValueOf(res.ConvertToString())) + } + res, err := e.f(rargs) + if err != nil { + return nil, err + } + return eval.CreateIntermediateResult(ev.Context(), res), nil +} + func (impl *interperterImpl) Evaluate(input string, defaultStatusCheck DefaultStatusCheck) (interface{}, error) { input = strings.TrimPrefix(input, "${{") + input = strings.TrimSuffix(input, "}}") if defaultStatusCheck != DefaultStatusCheckNone && input == "" { input = "success()" } - parser := actionlint.NewExprParser() - exprNode, err := parser.Parse(actionlint.NewExprLexer(input + "}}")) + + exprNode, err := exprparser.Parse(input) if err != nil { - return nil, fmt.Errorf("failed to parse: %s", err.Message) + return nil, fmt.Errorf("failed to parse: %s", err.Error()) } if defaultStatusCheck != DefaultStatusCheckNone { hasStatusCheckFunction := false - actionlint.VisitExprNode(exprNode, func(node, _ actionlint.ExprNode, entering bool) { - if funcCallNode, ok := node.(*actionlint.FuncCallNode); entering && ok { - switch strings.ToLower(funcCallNode.Callee) { + exprparser.VisitNode(exprNode, func(node exprparser.Node) { + if funcCallNode, ok := node.(*exprparser.FunctionNode); ok { + switch strings.ToLower(funcCallNode.Name) { case "success", "always", "cancelled", "failure": hasStatusCheckFunction = true } @@ -106,470 +193,103 @@ func (impl *interperterImpl) Evaluate(input string, defaultStatusCheck DefaultSt }) if !hasStatusCheckFunction { - exprNode = &actionlint.LogicalOpNode{ - Kind: actionlint.LogicalOpNodeKindAnd, - Left: &actionlint.FuncCallNode{ - Callee: 
defaultStatusCheck.String(), - Args: []actionlint.ExprNode{}, + exprNode = &exprparser.BinaryNode{ + Op: "&&", + Left: &exprparser.FunctionNode{ + Name: defaultStatusCheck.String(), + Args: []exprparser.Node{}, }, Right: exprNode, } } } - result, err2 := impl.evaluateNode(exprNode) + functions := impl.GetFunctions() - return result, err2 + vars := impl.GetVariables() + + ctx := eval.EvaluationContext{ + Functions: functions, + Variables: vars, + } + evaluator := eval.NewEvaluator(&ctx) + res, err := evaluator.Evaluate(exprNode) + if err != nil { + return nil, err + } + return evaluator.ToRaw(res) } -func (impl *interperterImpl) evaluateNode(exprNode actionlint.ExprNode) (interface{}, error) { - switch node := exprNode.(type) { - case *actionlint.VariableNode: - return impl.evaluateVariable(node) - case *actionlint.BoolNode: - return node.Value, nil - case *actionlint.NullNode: - return nil, nil - case *actionlint.IntNode: - return node.Value, nil - case *actionlint.FloatNode: - return node.Value, nil - case *actionlint.StringNode: - return node.Value, nil - case *actionlint.IndexAccessNode: - return impl.evaluateIndexAccess(node) - case *actionlint.ObjectDerefNode: - return impl.evaluateObjectDeref(node) - case *actionlint.ArrayDerefNode: - return impl.evaluateArrayDeref(node) - case *actionlint.NotOpNode: - return impl.evaluateNot(node) - case *actionlint.CompareOpNode: - return impl.evaluateCompare(node) - case *actionlint.LogicalOpNode: - return impl.evaluateLogicalCompare(node) - case *actionlint.FuncCallNode: - return impl.evaluateFuncCall(node) - default: - return nil, fmt.Errorf("fatal error! 
Unknown node type: %s node: %+v", reflect.TypeOf(exprNode), exprNode) +func (impl *interperterImpl) GetFunctions() eval.CaseInsensitiveObject[eval.Function] { + functions := eval.GetFunctions() + if impl.env.HashFiles != nil { + functions["hashfiles"] = &externalFunc{impl.env.HashFiles} } -} - -func (impl *interperterImpl) evaluateVariable(variableNode *actionlint.VariableNode) (interface{}, error) { - lowerName := strings.ToLower(variableNode.Name) - if result, err := impl.evaluateOverriddenVariable(lowerName); result != nil || err != nil { - return result, err - } - switch lowerName { - case "github": - return impl.env.Github, nil - case "env": - if impl.env.EnvCS { - return CaseSensitiveDict(impl.env.Env), nil + functions["always"] = &externalFunc{func(_ []reflect.Value) (interface{}, error) { + return impl.always() + }} + functions["success"] = &externalFunc{func(_ []reflect.Value) (interface{}, error) { + if impl.config.Context == "job" { + return impl.jobSuccess() } - return impl.env.Env, nil - case "job": - return impl.env.Job, nil - case "jobs": - if impl.env.Jobs == nil { - return nil, fmt.Errorf("unavailable context: jobs") + if impl.config.Context == "step" { + return impl.stepSuccess() } - return impl.env.Jobs, nil - case "steps": - return impl.env.Steps, nil - case "runner": - return impl.env.Runner, nil - case "secrets": - return impl.env.Secrets, nil - case "vars": - return impl.env.Vars, nil - case "strategy": - return impl.env.Strategy, nil - case "matrix": - return impl.env.Matrix, nil - case "needs": - return impl.env.Needs, nil - case "inputs": - return impl.env.Inputs, nil - case "infinity": - return math.Inf(1), nil - case "nan": - return math.NaN(), nil - default: - return nil, fmt.Errorf("unavailable context: %s", variableNode.Name) - } + return nil, fmt.Errorf("context '%s' must be one of 'job' or 'step'", impl.config.Context) + }} + functions["failure"] = &externalFunc{func(_ []reflect.Value) (interface{}, error) { + if impl.config.Context 
== "job" { + return impl.jobFailure() + } + if impl.config.Context == "step" { + return impl.stepFailure() + } + return nil, fmt.Errorf("context '%s' must be one of 'job' or 'step'", impl.config.Context) + }} + functions["cancelled"] = &externalFunc{func(_ []reflect.Value) (interface{}, error) { + return impl.cancelled() + }} + return functions } -func (impl *interperterImpl) evaluateOverriddenVariable(lowerName string) (interface{}, error) { - if cd, ok := impl.env.CtxData[lowerName]; ok { +func (impl *interperterImpl) GetVariables() eval.ReadOnlyObject[any] { + githubCtx := toRawObj(reflect.ValueOf(impl.env.Github)) + var env any + if impl.env.EnvCS { + env = eval.CaseSensitiveObject[any](toRawObj(reflect.ValueOf(impl.env.Env))) + } else { + env = eval.CaseInsensitiveObject[any](toRawObj(reflect.ValueOf(impl.env.Env))) + } + vars := eval.CaseInsensitiveObject[any]{ + "github": githubCtx, + "env": env, + "vars": toRawObj(reflect.ValueOf(impl.env.Vars)), + "steps": toRawObj(reflect.ValueOf(impl.env.Steps)), + "strategy": toRawObj(reflect.ValueOf(impl.env.Strategy)), + "matrix": toRawObj(reflect.ValueOf(impl.env.Matrix)), + "secrets": toRawObj(reflect.ValueOf(impl.env.Secrets)), + "job": toRawObj(reflect.ValueOf(impl.env.Job)), + "runner": toRawObj(reflect.ValueOf(impl.env.Runner)), + "needs": toRawObj(reflect.ValueOf(impl.env.Needs)), + "jobs": toRawObj(reflect.ValueOf(impl.env.Jobs)), + "inputs": toRawObj(reflect.ValueOf(impl.env.Inputs)), + } + for name, cd := range impl.env.CtxData { + lowerName := strings.ToLower(name) if serverPayload, ok := cd.(map[string]interface{}); ok { if lowerName == "github" { - var out map[string]interface{} - content, err := json.Marshal(impl.env.Github) - if err != nil { - return nil, err - } - err = json.Unmarshal(content, &out) - if err != nil { - return nil, err - } for k, v := range serverPayload { // skip empty values, because github.workspace was set by Gitea Actions to an empty string - if _, ok := out[k]; !ok || v != "" && v 
!= nil { - out[k] = v + if _, ok := githubCtx[k]; !ok || v != "" && v != nil { + githubCtx[k] = v } } - return out, nil + continue } } - return cd, nil - } - return nil, nil -} - -func (impl *interperterImpl) evaluateIndexAccess(indexAccessNode *actionlint.IndexAccessNode) (interface{}, error) { - left, err := impl.evaluateNode(indexAccessNode.Operand) - if err != nil { - return nil, err - } - - leftValue := reflect.ValueOf(left) - - right, err := impl.evaluateNode(indexAccessNode.Index) - if err != nil { - return nil, err - } - - rightValue := reflect.ValueOf(right) - - switch rightValue.Kind() { - case reflect.String: - return impl.getPropertyValue(leftValue, rightValue.String()) - - case reflect.Int: - switch leftValue.Kind() { - case reflect.Slice: - if rightValue.Int() < 0 || rightValue.Int() >= int64(leftValue.Len()) { - return nil, nil - } - return leftValue.Index(int(rightValue.Int())).Interface(), nil - default: - return nil, nil - } - - default: - return nil, nil - } -} - -func (impl *interperterImpl) evaluateObjectDeref(objectDerefNode *actionlint.ObjectDerefNode) (interface{}, error) { - left, err := impl.evaluateNode(objectDerefNode.Receiver) - if err != nil { - return nil, err - } - - _, receiverIsDeref := objectDerefNode.Receiver.(*actionlint.ArrayDerefNode) - if receiverIsDeref { - return impl.getPropertyValueDereferenced(reflect.ValueOf(left), objectDerefNode.Property) - } - return impl.getPropertyValue(reflect.ValueOf(left), objectDerefNode.Property) -} - -func (impl *interperterImpl) evaluateArrayDeref(arrayDerefNode *actionlint.ArrayDerefNode) (interface{}, error) { - left, err := impl.evaluateNode(arrayDerefNode.Receiver) - if err != nil { - return nil, err - } - - return impl.getSafeValue(reflect.ValueOf(left)), nil -} - -func (impl *interperterImpl) getPropertyValue(left reflect.Value, property string) (value interface{}, err error) { - switch left.Kind() { - case reflect.Ptr: - return impl.getPropertyValue(left.Elem(), property) - - case 
reflect.Struct: - leftType := left.Type() - for i := 0; i < leftType.NumField(); i++ { - jsonName := leftType.Field(i).Tag.Get("json") - if jsonName == property { - property = leftType.Field(i).Name - break - } - } - - fieldValue := left.FieldByNameFunc(func(name string) bool { - return strings.EqualFold(name, property) - }) - - if fieldValue.Kind() == reflect.Invalid { - return "", nil - } - - i := fieldValue.Interface() - // The type stepStatus int is an integer, but should be treated as string - if m, ok := i.(encoding.TextMarshaler); ok { - text, err := m.MarshalText() - if err != nil { - return nil, err - } - return string(text), nil - } - return i, nil - - case reflect.Map: - cd, ok := left.Interface().(CaseSensitiveDict) - if ok { - return cd[property], nil - } - - iter := left.MapRange() - - for iter.Next() { - key := iter.Key() - - switch key.Kind() { - case reflect.String: - if strings.EqualFold(key.String(), property) { - return impl.getMapValue(iter.Value()) - } - - default: - return nil, fmt.Errorf("'%s' in map key not implemented", key.Kind()) - } - } - - return nil, nil - - case reflect.Slice: - var values []interface{} - - for i := 0; i < left.Len(); i++ { - value, err := impl.getPropertyValue(left.Index(i).Elem(), property) - if err != nil { - return nil, err - } - - values = append(values, value) - } - - return values, nil - } - - return nil, nil -} - -func (impl *interperterImpl) getPropertyValueDereferenced(left reflect.Value, property string) (value interface{}, err error) { - switch left.Kind() { - case reflect.Ptr: - return impl.getPropertyValue(left, property) - - case reflect.Struct: - return impl.getPropertyValue(left, property) - case reflect.Map: - iter := left.MapRange() - - var values []interface{} - for iter.Next() { - value, err := impl.getPropertyValue(iter.Value(), property) - if err != nil { - return nil, err - } - - values = append(values, value) - } - - return values, nil - case reflect.Slice: - return 
impl.getPropertyValue(left, property) - } - - return nil, nil -} - -func (impl *interperterImpl) getMapValue(value reflect.Value) (interface{}, error) { - if value.Kind() == reflect.Ptr { - return impl.getMapValue(value.Elem()) - } - - return value.Interface(), nil -} - -func (impl *interperterImpl) evaluateNot(notNode *actionlint.NotOpNode) (interface{}, error) { - operand, err := impl.evaluateNode(notNode.Operand) - if err != nil { - return nil, err - } - - return !IsTruthy(operand), nil -} - -func (impl *interperterImpl) evaluateCompare(compareNode *actionlint.CompareOpNode) (interface{}, error) { - left, err := impl.evaluateNode(compareNode.Left) - if err != nil { - return nil, err - } - - right, err := impl.evaluateNode(compareNode.Right) - if err != nil { - return nil, err - } - - leftValue := reflect.ValueOf(left) - rightValue := reflect.ValueOf(right) - - return impl.compareValues(leftValue, rightValue, compareNode.Kind) -} - -func (impl *interperterImpl) compareValues(leftValue reflect.Value, rightValue reflect.Value, kind actionlint.CompareOpNodeKind) (interface{}, error) { - if leftValue.Kind() != rightValue.Kind() { - if !impl.isNumber(leftValue) { - leftValue = impl.coerceToNumber(leftValue) - } - if !impl.isNumber(rightValue) { - rightValue = impl.coerceToNumber(rightValue) - } - } - - switch leftValue.Kind() { - case reflect.Bool: - return impl.compareNumber(float64(impl.coerceToNumber(leftValue).Int()), float64(impl.coerceToNumber(rightValue).Int()), kind) - case reflect.String: - return impl.compareString(strings.ToLower(leftValue.String()), strings.ToLower(rightValue.String()), kind) - - case reflect.Int: - if rightValue.Kind() == reflect.Float64 { - return impl.compareNumber(float64(leftValue.Int()), rightValue.Float(), kind) - } - - return impl.compareNumber(float64(leftValue.Int()), float64(rightValue.Int()), kind) - - case reflect.Float64: - if rightValue.Kind() == reflect.Int { - return impl.compareNumber(leftValue.Float(), 
float64(rightValue.Int()), kind) - } - - return impl.compareNumber(leftValue.Float(), rightValue.Float(), kind) - - case reflect.Invalid: - if rightValue.Kind() == reflect.Invalid { - return true, nil - } - - // not possible situation - params are converted to the same type in code above - return nil, fmt.Errorf("compare params of Invalid type: left: %+v, right: %+v", leftValue.Kind(), rightValue.Kind()) - - default: - return nil, fmt.Errorf("compare not implemented for types: left: %+v, right: %+v", leftValue.Kind(), rightValue.Kind()) - } -} - -func (impl *interperterImpl) coerceToNumber(value reflect.Value) reflect.Value { - switch value.Kind() { - case reflect.Invalid: - return reflect.ValueOf(0) - - case reflect.Bool: - switch value.Bool() { - case true: - return reflect.ValueOf(1) - case false: - return reflect.ValueOf(0) - } - - case reflect.String: - if value.String() == "" { - return reflect.ValueOf(0) - } - - // try to parse the string as a number - evaluated, err := impl.Evaluate(value.String(), DefaultStatusCheckNone) - if err != nil { - return reflect.ValueOf(math.NaN()) - } - - if value := reflect.ValueOf(evaluated); impl.isNumber(value) { - return value - } - } - - return reflect.ValueOf(math.NaN()) -} - -func (impl *interperterImpl) coerceToString(value reflect.Value) reflect.Value { - switch value.Kind() { - case reflect.Invalid: - return reflect.ValueOf("") - - case reflect.Bool: - switch value.Bool() { - case true: - return reflect.ValueOf("true") - case false: - return reflect.ValueOf("false") - } - - case reflect.String: - return value - - case reflect.Int: - return reflect.ValueOf(fmt.Sprint(value)) - - case reflect.Float64: - if math.IsInf(value.Float(), 1) { - return reflect.ValueOf("Infinity") - } else if math.IsInf(value.Float(), -1) { - return reflect.ValueOf("-Infinity") - } - return reflect.ValueOf(fmt.Sprintf("%.15G", value.Float())) - - case reflect.Slice: - return reflect.ValueOf("Array") - - case reflect.Map: - return 
reflect.ValueOf("Object") - } - - return value -} - -func (impl *interperterImpl) compareString(left string, right string, kind actionlint.CompareOpNodeKind) (bool, error) { - switch kind { - case actionlint.CompareOpNodeKindLess: - return left < right, nil - case actionlint.CompareOpNodeKindLessEq: - return left <= right, nil - case actionlint.CompareOpNodeKindGreater: - return left > right, nil - case actionlint.CompareOpNodeKindGreaterEq: - return left >= right, nil - case actionlint.CompareOpNodeKindEq: - return left == right, nil - case actionlint.CompareOpNodeKindNotEq: - return left != right, nil - default: - return false, fmt.Errorf("todo: not implemented to compare '%+v'", kind) - } -} - -func (impl *interperterImpl) compareNumber(left float64, right float64, kind actionlint.CompareOpNodeKind) (bool, error) { - switch kind { - case actionlint.CompareOpNodeKindLess: - return left < right, nil - case actionlint.CompareOpNodeKindLessEq: - return left <= right, nil - case actionlint.CompareOpNodeKindGreater: - return left > right, nil - case actionlint.CompareOpNodeKindGreaterEq: - return left >= right, nil - case actionlint.CompareOpNodeKindEq: - return left == right, nil - case actionlint.CompareOpNodeKindNotEq: - return left != right, nil - default: - return false, fmt.Errorf("todo: not implemented to compare '%+v'", kind) + vars[name] = cd } + return vars } func IsTruthy(input interface{}) bool { @@ -598,116 +318,3 @@ func IsTruthy(input interface{}) bool { return false } } - -func (impl *interperterImpl) isNumber(value reflect.Value) bool { - switch value.Kind() { - case reflect.Int, reflect.Float64: - return true - default: - return false - } -} - -func (impl *interperterImpl) getSafeValue(value reflect.Value) interface{} { - switch value.Kind() { - case reflect.Invalid: - return nil - - case reflect.Float64: - if value.Float() == 0 { - return 0 - } - } - - return value.Interface() -} - -func (impl *interperterImpl) evaluateLogicalCompare(compareNode 
*actionlint.LogicalOpNode) (interface{}, error) { - left, err := impl.evaluateNode(compareNode.Left) - if err != nil { - return nil, err - } - - leftValue := reflect.ValueOf(left) - - if IsTruthy(left) == (compareNode.Kind == actionlint.LogicalOpNodeKindOr) { - return impl.getSafeValue(leftValue), nil - } - - right, err := impl.evaluateNode(compareNode.Right) - if err != nil { - return nil, err - } - - rightValue := reflect.ValueOf(right) - - switch compareNode.Kind { - case actionlint.LogicalOpNodeKindAnd: - return impl.getSafeValue(rightValue), nil - case actionlint.LogicalOpNodeKindOr: - return impl.getSafeValue(rightValue), nil - } - - return nil, fmt.Errorf("unable to compare incompatibles types '%s' and '%s'", leftValue.Kind(), rightValue.Kind()) -} - -//nolint:gocyclo -func (impl *interperterImpl) evaluateFuncCall(funcCallNode *actionlint.FuncCallNode) (interface{}, error) { - args := make([]reflect.Value, 0) - - for _, arg := range funcCallNode.Args { - value, err := impl.evaluateNode(arg) - if err != nil { - return nil, err - } - - args = append(args, reflect.ValueOf(value)) - } - - switch strings.ToLower(funcCallNode.Callee) { - case "contains": - return impl.contains(args[0], args[1]) - case "startswith": - return impl.startsWith(args[0], args[1]) - case "endswith": - return impl.endsWith(args[0], args[1]) - case "format": - return impl.format(args[0], args[1:]...) - case "join": - if len(args) == 1 { - return impl.join(args[0], reflect.ValueOf(",")) - } - return impl.join(args[0], args[1]) - case "tojson": - return impl.toJSON(args[0]) - case "fromjson": - return impl.fromJSON(args[0]) - case "hashfiles": - if impl.env.HashFiles != nil { - return impl.env.HashFiles(args) - } - return impl.hashFiles(args...) 
- case "always": - return impl.always() - case "success": - if impl.config.Context == "job" { - return impl.jobSuccess() - } - if impl.config.Context == "step" { - return impl.stepSuccess() - } - return nil, fmt.Errorf("context '%s' must be one of 'job' or 'step'", impl.config.Context) - case "failure": - if impl.config.Context == "job" { - return impl.jobFailure() - } - if impl.config.Context == "step" { - return impl.stepFailure() - } - return nil, fmt.Errorf("context '%s' must be one of 'job' or 'step'", impl.config.Context) - case "cancelled": - return impl.cancelled() - default: - return nil, fmt.Errorf("todo: '%s' not implemented", funcCallNode.Callee) - } -} diff --git a/pkg/exprparser/interpreter_test.go b/pkg/exprparser/interpreter_test.go index 64525f28..940b8ce3 100644 --- a/pkg/exprparser/interpreter_test.go +++ b/pkg/exprparser/interpreter_test.go @@ -17,9 +17,9 @@ func TestLiterals(t *testing.T) { {"true", true, "true"}, {"false", false, "false"}, {"null", nil, "null"}, - {"123", 123, "integer"}, + {"123", float64(123), "integer"}, {"-9.7", -9.7, "float"}, - {"0xff", 255, "hex"}, + {"0xff", float64(255), "hex"}, {"-2.99e-2", -2.99e-2, "exponential"}, {"'foo'", "foo", "string"}, {"'it''s foo'", "it's foo", "string"}, @@ -50,10 +50,11 @@ func TestOperators(t *testing.T) { {"github.action[0]", nil, "string-index", ""}, {"github.action['0']", nil, "string-index", ""}, {"fromJSON('[0,1]')[1]", 1.0, "array-index", ""}, - {"fromJSON('[0,1]')[1.1]", nil, "array-index", ""}, - // Disabled weird things are happening - // {"fromJSON('[0,1]')['1.1']", nil, "array-index", ""}, - {"(github.event.commits.*.author.username)[0]", "someone", "array-index-0", ""}, + {"fromJSON('[0,1]')[1.1]", 1.0, "array-index", ""}, + {"fromJSON('[0,1]')['1.1']", nil, "array-index", ""}, + // Invalid Test + // {"(github.event.commits.*.author.username)[0]", "someone", "array-index-0", ""}, + {"fromjson(tojson(github.event.commits.*.author.username))[0]", "someone", "array-index-0", 
""}, {"fromJSON('[0,1]')[2]", nil, "array-index-out-of-bounds-0", ""}, {"fromJSON('[0,1]')[34553]", nil, "array-index-out-of-bounds-1", ""}, {"fromJSON('[0,1]')[-1]", nil, "array-index-out-of-bounds-2", ""}, @@ -72,8 +73,9 @@ func TestOperators(t *testing.T) { {"github.event.commits[0].author.username != github.event.commits[1].author.username", true, "property-comparison1", ""}, {"github.event.commits[0].author.username1 != github.event.commits[1].author.username", true, "property-comparison2", ""}, {"github.event.commits[0].author.username != github.event.commits[1].author.username1", true, "property-comparison3", ""}, - {"github.event.commits[0].author.username1 != github.event.commits[1].author.username2", true, "property-comparison4", ""}, - {"secrets != env", nil, "property-comparison5", "compare not implemented for types: left: map, right: map"}, + {"github.event.commits[0].author.username1 != github.event.commits[1].author.username2", false, "property-comparison4", ""}, + {"secrets != env", true, "property-comparison5", ""}, + {"job.container && 'failure' || 'ok'", "ok", "object-truth", ""}, } env := &EvaluationEnvironment{ @@ -175,7 +177,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"true && 3.14", 3.14, "true-and"}, {"true && 0.0", 0, "true-and"}, {"true && Infinity", math.Inf(1), "true-and"}, - // {"true && -Infinity", math.Inf(-1), "true-and"}, + {"true && -Infinity", math.Inf(-1), "true-and"}, {"true && NaN", math.NaN(), "true-and"}, {"true && ''", "", "true-and"}, {"true && 'abc'", "abc", "true-and"}, @@ -189,7 +191,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"false && 3.14", false, "false-and"}, {"false && 0.0", false, "false-and"}, {"false && Infinity", false, "false-and"}, - // {"false && -Infinity", false, "false-and"}, + {"false && -Infinity", false, "false-and"}, {"false && NaN", false, "false-and"}, {"false && ''", false, "false-and"}, {"false && 'abc'", false, "false-and"}, @@ -203,7 +205,7 @@ func 
TestOperatorsBooleanEvaluation(t *testing.T) { {"true || 3.14", true, "true-or"}, {"true || 0.0", true, "true-or"}, {"true || Infinity", true, "true-or"}, - // {"true || -Infinity", true, "true-or"}, + {"true || -Infinity", true, "true-or"}, {"true || NaN", true, "true-or"}, {"true || ''", true, "true-or"}, {"true || 'abc'", true, "true-or"}, @@ -217,7 +219,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"false || 3.14", 3.14, "false-or"}, {"false || 0.0", 0, "false-or"}, {"false || Infinity", math.Inf(1), "false-or"}, - // {"false || -Infinity", math.Inf(-1), "false-or"}, + {"false || -Infinity", math.Inf(-1), "false-or"}, {"false || NaN", math.NaN(), "false-or"}, {"false || ''", "", "false-or"}, {"false || 'abc'", "abc", "false-or"}, @@ -231,7 +233,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"null && 3.14", nil, "null-and"}, {"null && 0.0", nil, "null-and"}, {"null && Infinity", nil, "null-and"}, - // {"null && -Infinity", nil, "null-and"}, + {"null && -Infinity", nil, "null-and"}, {"null && NaN", nil, "null-and"}, {"null && ''", nil, "null-and"}, {"null && 'abc'", nil, "null-and"}, @@ -245,7 +247,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"null || 3.14", 3.14, "null-or"}, {"null || 0.0", 0, "null-or"}, {"null || Infinity", math.Inf(1), "null-or"}, - // {"null || -Infinity", math.Inf(-1), "null-or"}, + {"null || -Infinity", math.Inf(-1), "null-or"}, {"null || NaN", math.NaN(), "null-or"}, {"null || ''", "", "null-or"}, {"null || 'abc'", "abc", "null-or"}, @@ -259,7 +261,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"-10 && 3.14", 3.14, "neg-num-and"}, {"-10 && 0.0", 0, "neg-num-and"}, {"-10 && Infinity", math.Inf(1), "neg-num-and"}, - // {"-10 && -Infinity", math.Inf(-1), "neg-num-and"}, + {"-10 && -Infinity", math.Inf(-1), "neg-num-and"}, {"-10 && NaN", math.NaN(), "neg-num-and"}, {"-10 && ''", "", "neg-num-and"}, {"-10 && 'abc'", "abc", "neg-num-and"}, @@ -273,7 +275,7 @@ func TestOperatorsBooleanEvaluation(t 
*testing.T) { {"-10 || 3.14", -10, "neg-num-or"}, {"-10 || 0.0", -10, "neg-num-or"}, {"-10 || Infinity", -10, "neg-num-or"}, - // {"-10 || -Infinity", -10, "neg-num-or"}, + {"-10 || -Infinity", -10, "neg-num-or"}, {"-10 || NaN", -10, "neg-num-or"}, {"-10 || ''", -10, "neg-num-or"}, {"-10 || 'abc'", -10, "neg-num-or"}, @@ -287,7 +289,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"0 && 3.14", 0, "zero-and"}, {"0 && 0.0", 0, "zero-and"}, {"0 && Infinity", 0, "zero-and"}, - // {"0 && -Infinity", 0, "zero-and"}, + {"0 && -Infinity", 0, "zero-and"}, {"0 && NaN", 0, "zero-and"}, {"0 && ''", 0, "zero-and"}, {"0 && 'abc'", 0, "zero-and"}, @@ -301,7 +303,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"0 || 3.14", 3.14, "zero-or"}, {"0 || 0.0", 0, "zero-or"}, {"0 || Infinity", math.Inf(1), "zero-or"}, - // {"0 || -Infinity", math.Inf(-1), "zero-or"}, + {"0 || -Infinity", math.Inf(-1), "zero-or"}, {"0 || NaN", math.NaN(), "zero-or"}, {"0 || ''", "", "zero-or"}, {"0 || 'abc'", "abc", "zero-or"}, @@ -343,7 +345,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"3.14 && 3.14", 3.14, "pos-float-and"}, {"3.14 && 0.0", 0, "pos-float-and"}, {"3.14 && Infinity", math.Inf(1), "pos-float-and"}, - // {"3.14 && -Infinity", math.Inf(-1), "pos-float-and"}, + {"3.14 && -Infinity", math.Inf(-1), "pos-float-and"}, {"3.14 && NaN", math.NaN(), "pos-float-and"}, {"3.14 && ''", "", "pos-float-and"}, {"3.14 && 'abc'", "abc", "pos-float-and"}, @@ -357,7 +359,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"3.14 || 3.14", 3.14, "pos-float-or"}, {"3.14 || 0.0", 3.14, "pos-float-or"}, {"3.14 || Infinity", 3.14, "pos-float-or"}, - // {"3.14 || -Infinity", 3.14, "pos-float-or"}, + {"3.14 || -Infinity", 3.14, "pos-float-or"}, {"3.14 || NaN", 3.14, "pos-float-or"}, {"3.14 || ''", 3.14, "pos-float-or"}, {"3.14 || 'abc'", 3.14, "pos-float-or"}, @@ -371,7 +373,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"Infinity && 3.14", 3.14, "pos-inf-and"}, {"Infinity 
&& 0.0", 0, "pos-inf-and"}, {"Infinity && Infinity", math.Inf(1), "pos-inf-and"}, - // {"Infinity && -Infinity", math.Inf(-1), "pos-inf-and"}, + {"Infinity && -Infinity", math.Inf(-1), "pos-inf-and"}, {"Infinity && NaN", math.NaN(), "pos-inf-and"}, {"Infinity && ''", "", "pos-inf-and"}, {"Infinity && 'abc'", "abc", "pos-inf-and"}, @@ -385,38 +387,38 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"Infinity || 3.14", math.Inf(1), "pos-inf-or"}, {"Infinity || 0.0", math.Inf(1), "pos-inf-or"}, {"Infinity || Infinity", math.Inf(1), "pos-inf-or"}, - // {"Infinity || -Infinity", math.Inf(1), "pos-inf-or"}, + {"Infinity || -Infinity", math.Inf(1), "pos-inf-or"}, {"Infinity || NaN", math.Inf(1), "pos-inf-or"}, {"Infinity || ''", math.Inf(1), "pos-inf-or"}, {"Infinity || 'abc'", math.Inf(1), "pos-inf-or"}, // -Infinity && - // {"-Infinity && true", true, "neg-inf-and"}, - // {"-Infinity && false", false, "neg-inf-and"}, - // {"-Infinity && null", nil, "neg-inf-and"}, - // {"-Infinity && -10", -10, "neg-inf-and"}, - // {"-Infinity && 0", 0, "neg-inf-and"}, - // {"-Infinity && 10", 10, "neg-inf-and"}, - // {"-Infinity && 3.14", 3.14, "neg-inf-and"}, - // {"-Infinity && 0.0", 0, "neg-inf-and"}, - // {"-Infinity && Infinity", math.Inf(1), "neg-inf-and"}, - // {"-Infinity && -Infinity", math.Inf(-1), "neg-inf-and"}, - // {"-Infinity && NaN", math.NaN(), "neg-inf-and"}, - // {"-Infinity && ''", "", "neg-inf-and"}, - // {"-Infinity && 'abc'", "abc", "neg-inf-and"}, + {"-Infinity && true", true, "neg-inf-and"}, + {"-Infinity && false", false, "neg-inf-and"}, + {"-Infinity && null", nil, "neg-inf-and"}, + {"-Infinity && -10", -10, "neg-inf-and"}, + {"-Infinity && 0", 0, "neg-inf-and"}, + {"-Infinity && 10", 10, "neg-inf-and"}, + {"-Infinity && 3.14", 3.14, "neg-inf-and"}, + {"-Infinity && 0.0", 0, "neg-inf-and"}, + {"-Infinity && Infinity", math.Inf(1), "neg-inf-and"}, + {"-Infinity && -Infinity", math.Inf(-1), "neg-inf-and"}, + {"-Infinity && NaN", math.NaN(), 
"neg-inf-and"}, + {"-Infinity && ''", "", "neg-inf-and"}, + {"-Infinity && 'abc'", "abc", "neg-inf-and"}, // -Infinity || - // {"-Infinity || true", math.Inf(-1), "neg-inf-or"}, - // {"-Infinity || false", math.Inf(-1), "neg-inf-or"}, - // {"-Infinity || null", math.Inf(-1), "neg-inf-or"}, - // {"-Infinity || -10", math.Inf(-1), "neg-inf-or"}, - // {"-Infinity || 0", math.Inf(-1), "neg-inf-or"}, - // {"-Infinity || 10", math.Inf(-1), "neg-inf-or"}, - // {"-Infinity || 3.14", math.Inf(-1), "neg-inf-or"}, - // {"-Infinity || 0.0", math.Inf(-1), "neg-inf-or"}, - // {"-Infinity || Infinity", math.Inf(-1), "neg-inf-or"}, - // {"-Infinity || -Infinity", math.Inf(-1), "neg-inf-or"}, - // {"-Infinity || NaN", math.Inf(-1), "neg-inf-or"}, - // {"-Infinity || ''", math.Inf(-1), "neg-inf-or"}, - // {"-Infinity || 'abc'", math.Inf(-1), "neg-inf-or"}, + {"-Infinity || true", math.Inf(-1), "neg-inf-or"}, + {"-Infinity || false", math.Inf(-1), "neg-inf-or"}, + {"-Infinity || null", math.Inf(-1), "neg-inf-or"}, + {"-Infinity || -10", math.Inf(-1), "neg-inf-or"}, + {"-Infinity || 0", math.Inf(-1), "neg-inf-or"}, + {"-Infinity || 10", math.Inf(-1), "neg-inf-or"}, + {"-Infinity || 3.14", math.Inf(-1), "neg-inf-or"}, + {"-Infinity || 0.0", math.Inf(-1), "neg-inf-or"}, + {"-Infinity || Infinity", math.Inf(-1), "neg-inf-or"}, + {"-Infinity || -Infinity", math.Inf(-1), "neg-inf-or"}, + {"-Infinity || NaN", math.Inf(-1), "neg-inf-or"}, + {"-Infinity || ''", math.Inf(-1), "neg-inf-or"}, + {"-Infinity || 'abc'", math.Inf(-1), "neg-inf-or"}, // NaN && {"NaN && true", math.NaN(), "nan-and"}, {"NaN && false", math.NaN(), "nan-and"}, @@ -427,7 +429,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"NaN && 3.14", math.NaN(), "nan-and"}, {"NaN && 0.0", math.NaN(), "nan-and"}, {"NaN && Infinity", math.NaN(), "nan-and"}, - // {"NaN && -Infinity", math.NaN(), "nan-and"}, + {"NaN && -Infinity", math.NaN(), "nan-and"}, {"NaN && NaN", math.NaN(), "nan-and"}, {"NaN && ''", math.NaN(), 
"nan-and"}, {"NaN && 'abc'", math.NaN(), "nan-and"}, @@ -441,7 +443,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"NaN || 3.14", 3.14, "nan-or"}, {"NaN || 0.0", 0, "nan-or"}, {"NaN || Infinity", math.Inf(1), "nan-or"}, - // {"NaN || -Infinity", math.Inf(-1), "nan-or"}, + {"NaN || -Infinity", math.Inf(-1), "nan-or"}, {"NaN || NaN", math.NaN(), "nan-or"}, {"NaN || ''", "", "nan-or"}, {"NaN || 'abc'", "abc", "nan-or"}, @@ -455,7 +457,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"'' && 3.14", "", "empty-str-and"}, {"'' && 0.0", "", "empty-str-and"}, {"'' && Infinity", "", "empty-str-and"}, - // {"'' && -Infinity", "", "empty-str-and"}, + {"'' && -Infinity", "", "empty-str-and"}, {"'' && NaN", "", "empty-str-and"}, {"'' && ''", "", "empty-str-and"}, {"'' && 'abc'", "", "empty-str-and"}, @@ -469,7 +471,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"'' || 3.14", 3.14, "empty-str-or"}, {"'' || 0.0", 0, "empty-str-or"}, {"'' || Infinity", math.Inf(1), "empty-str-or"}, - // {"'' || -Infinity", math.Inf(-1), "empty-str-or"}, + {"'' || -Infinity", math.Inf(-1), "empty-str-or"}, {"'' || NaN", math.NaN(), "empty-str-or"}, {"'' || ''", "", "empty-str-or"}, {"'' || 'abc'", "abc", "empty-str-or"}, @@ -483,7 +485,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"'abc' && 3.14", 3.14, "str-and"}, {"'abc' && 0.0", 0, "str-and"}, {"'abc' && Infinity", math.Inf(1), "str-and"}, - // {"'abc' && -Infinity", math.Inf(-1), "str-and"}, + {"'abc' && -Infinity", math.Inf(-1), "str-and"}, {"'abc' && NaN", math.NaN(), "str-and"}, {"'abc' && ''", "", "str-and"}, {"'abc' && 'abc'", "abc", "str-and"}, @@ -497,7 +499,7 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { {"'abc' || 3.14", "abc", "str-or"}, {"'abc' || 0.0", "abc", "str-or"}, {"'abc' || Infinity", "abc", "str-or"}, - // {"'abc' || -Infinity", "abc", "str-or"}, + {"'abc' || -Infinity", "abc", "str-or"}, {"'abc' || NaN", "abc", "str-or"}, {"'abc' || ''", "abc", "str-or"}, {"'abc' || 'abc'", 
"abc", "str-or"}, @@ -517,6 +519,11 @@ func TestOperatorsBooleanEvaluation(t *testing.T) { output, err := NewInterpeter(env, Config{}).Evaluate(tt.input, DefaultStatusCheckNone) assert.Nil(t, err) + // Normalize int => float64 + if i, ok := tt.expected.(int); ok { + tt.expected = (float64)(i) + } + if expected, ok := tt.expected.(float64); ok && math.IsNaN(expected) { assert.True(t, math.IsNaN(output.(float64))) } else { @@ -543,9 +550,9 @@ func TestContexts(t *testing.T) { {input: "github.event.pull_request.labels.*.name", expected: nil, name: "github-context-noexist-prop"}, {input: "env.TEST", expected: "value", name: "env-context"}, {input: "env.TEST", expected: "value", name: "env-context", caseSensitiveEnv: true}, - {input: "env.test", expected: "", name: "env-context", caseSensitiveEnv: true}, + {input: "env.test", expected: nil, name: "env-context", caseSensitiveEnv: true}, {input: "env['TEST']", expected: "value", name: "env-context", caseSensitiveEnv: true}, - {input: "env['test']", expected: "", name: "env-context", caseSensitiveEnv: true}, + {input: "env['test']", expected: nil, name: "env-context", caseSensitiveEnv: true}, {input: "env.test", expected: "value", name: "env-context"}, {input: "job.status", expected: "success", name: "job-context"}, {input: "steps.step-id.outputs.name", expected: "value", name: "steps-context"}, @@ -561,10 +568,9 @@ func TestContexts(t *testing.T) { {input: "steps['step-id']['outcome'] && true", expected: true, name: "steps-context-outcome"}, {input: "steps.step-id2.outcome", expected: "failure", name: "steps-context-outcome"}, {input: "steps.step-id2.outcome && true", expected: true, name: "steps-context-outcome"}, - // Disabled, since the interpreter is still too broken - // {"contains(steps.*.outcome, 'success')", true, "steps-context-array-outcome"}, - // {"contains(steps.*.outcome, 'failure')", true, "steps-context-array-outcome"}, - // {"contains(steps.*.outputs.name, 'value')", true, "steps-context-array-outputs"}, 
+ {input: "contains(steps.*.outcome, 'success')", expected: true, name: "steps-context-array-outcome"}, + {input: "contains(steps.*.outcome, 'failure')", expected: true, name: "steps-context-array-outcome"}, + {input: "contains(steps.*.outputs.name, 'value')", expected: true, name: "steps-context-array-outputs"}, {input: "runner.os", expected: "Linux", name: "runner-context"}, {input: "secrets.name", expected: "value", name: "secrets-context"}, {input: "vars.name", expected: "value", name: "vars-context"}, diff --git a/pkg/exprparser/testdata/for-hashing-1.txt b/pkg/exprparser/testdata/for-hashing-1.txt deleted file mode 100644 index e965047a..00000000 --- a/pkg/exprparser/testdata/for-hashing-1.txt +++ /dev/null @@ -1 +0,0 @@ -Hello diff --git a/pkg/exprparser/testdata/for-hashing-2.txt b/pkg/exprparser/testdata/for-hashing-2.txt deleted file mode 100644 index 496c8755..00000000 --- a/pkg/exprparser/testdata/for-hashing-2.txt +++ /dev/null @@ -1 +0,0 @@ -World! diff --git a/pkg/exprparser/testdata/for-hashing-3/data.txt b/pkg/exprparser/testdata/for-hashing-3/data.txt deleted file mode 100644 index 5ac7bf9b..00000000 --- a/pkg/exprparser/testdata/for-hashing-3/data.txt +++ /dev/null @@ -1 +0,0 @@ -Knock knock! diff --git a/pkg/exprparser/testdata/for-hashing-3/nested/nested-data.txt b/pkg/exprparser/testdata/for-hashing-3/nested/nested-data.txt deleted file mode 100644 index ebe288b2..00000000 --- a/pkg/exprparser/testdata/for-hashing-3/nested/nested-data.txt +++ /dev/null @@ -1 +0,0 @@ -Anybody home? 
diff --git a/pkg/model/workflow.go b/pkg/model/workflow.go index e663fc9a..db1c0a2f 100644 --- a/pkg/model/workflow.go +++ b/pkg/model/workflow.go @@ -69,6 +69,9 @@ func (w *Workflow) OnEvent(event string) interface{} { } func (w *Workflow) UnmarshalYAML(node *yaml.Node) error { + if err := resolveAliases(node); err != nil { + return err + } // Validate the schema before deserializing it into our model if err := (&schema.Node{ Definition: "workflow-root", @@ -76,9 +79,6 @@ func (w *Workflow) UnmarshalYAML(node *yaml.Node) error { }).UnmarshalYAML(node); err != nil { return errors.Join(err, fmt.Errorf("actions YAML Schema Validation Error detected:\nFor more information, see: https://actions-oss.github.io/act-docs/usage/schema.html")) } - if err := resolveAliases(node); err != nil { - return err - } type WorkflowDefault Workflow return node.Decode((*WorkflowDefault)(w)) } @@ -86,6 +86,9 @@ func (w *Workflow) UnmarshalYAML(node *yaml.Node) error { type WorkflowStrict Workflow func (w *WorkflowStrict) UnmarshalYAML(node *yaml.Node) error { + if err := resolveAliases(node); err != nil { + return err + } // Validate the schema before deserializing it into our model if err := (&schema.Node{ Definition: "workflow-root-strict", @@ -93,9 +96,6 @@ func (w *WorkflowStrict) UnmarshalYAML(node *yaml.Node) error { }).UnmarshalYAML(node); err != nil { return errors.Join(err, fmt.Errorf("actions YAML Strict Schema Validation Error detected:\nFor more information, see: https://nektosact.com/usage/schema.html")) } - if err := resolveAliases(node); err != nil { - return err - } type WorkflowDefault Workflow return node.Decode((*WorkflowDefault)(w)) } diff --git a/pkg/model/workflow_test.go b/pkg/model/workflow_test.go index 6868cb28..1d466625 100644 --- a/pkg/model/workflow_test.go +++ b/pkg/model/workflow_test.go @@ -444,6 +444,12 @@ func TestStep_ShellCommand(t *testing.T) { func TestReadWorkflow_WorkflowDispatchConfig(t *testing.T) { yaml := ` name: local-action-docker-url + on: 
push + jobs: + test: + runs-on: ubuntu-latest + steps: + - run: echo Test ` workflow, err := ReadWorkflow(strings.NewReader(yaml), false) assert.NoError(t, err, "read workflow should succeed") @@ -452,16 +458,12 @@ func TestReadWorkflow_WorkflowDispatchConfig(t *testing.T) { yaml = ` name: local-action-docker-url - on: push - ` - workflow, err = ReadWorkflow(strings.NewReader(yaml), false) - assert.NoError(t, err, "read workflow should succeed") - workflowDispatch = workflow.WorkflowDispatchConfig() - assert.Nil(t, workflowDispatch) - - yaml = ` - name: local-action-docker-url on: workflow_dispatch + jobs: + test: + runs-on: ubuntu-latest + steps: + - run: echo Test ` workflow, err = ReadWorkflow(strings.NewReader(yaml), false) assert.NoError(t, err, "read workflow should succeed") @@ -472,6 +474,11 @@ func TestReadWorkflow_WorkflowDispatchConfig(t *testing.T) { yaml = ` name: local-action-docker-url on: [push, pull_request] + jobs: + test: + runs-on: ubuntu-latest + steps: + - run: echo Test ` workflow, err = ReadWorkflow(strings.NewReader(yaml), false) assert.NoError(t, err, "read workflow should succeed") @@ -481,6 +488,11 @@ func TestReadWorkflow_WorkflowDispatchConfig(t *testing.T) { yaml = ` name: local-action-docker-url on: [push, workflow_dispatch] + jobs: + test: + runs-on: ubuntu-latest + steps: + - run: echo Test ` workflow, err = ReadWorkflow(strings.NewReader(yaml), false) assert.NoError(t, err, "read workflow should succeed") @@ -493,6 +505,11 @@ func TestReadWorkflow_WorkflowDispatchConfig(t *testing.T) { on: - push - workflow_dispatch + jobs: + test: + runs-on: ubuntu-latest + steps: + - run: echo Test ` workflow, err = ReadWorkflow(strings.NewReader(yaml), false) assert.NoError(t, err, "read workflow should succeed") @@ -505,6 +522,11 @@ func TestReadWorkflow_WorkflowDispatchConfig(t *testing.T) { on: push: pull_request: + jobs: + test: + runs-on: ubuntu-latest + steps: + - run: echo Test ` workflow, err = ReadWorkflow(strings.NewReader(yaml), 
false) assert.NoError(t, err, "read workflow should succeed") @@ -527,6 +549,11 @@ func TestReadWorkflow_WorkflowDispatchConfig(t *testing.T) { - info - warning - debug + jobs: + test: + runs-on: ubuntu-latest + steps: + - run: echo Test ` workflow, err = ReadWorkflow(strings.NewReader(yaml), false) assert.NoError(t, err, "read workflow should succeed") diff --git a/pkg/runner/expression_test.go b/pkg/runner/expression_test.go index c0776e1d..34d5f92a 100644 --- a/pkg/runner/expression_test.go +++ b/pkg/runner/expression_test.go @@ -143,6 +143,9 @@ func TestEvaluateRunContext(t *testing.T) { out, err := ee.evaluate(context.Background(), table.in, exprparser.DefaultStatusCheckNone) if table.errMesg == "" { assertObject.NoError(err, table.in) + if i, ok := table.out.(int); ok { + table.out = float64(i) + } assertObject.Equal(table.out, out, table.in) } else { assertObject.Error(err, table.in) diff --git a/pkg/runner/testdata/workflow_call_inputs/workflow_call_inputs.yml b/pkg/runner/testdata/workflow_call_inputs/workflow_call_inputs.yml index 1f9bd9fe..5ec26359 100644 --- a/pkg/runner/testdata/workflow_call_inputs/workflow_call_inputs.yml +++ b/pkg/runner/testdata/workflow_call_inputs/workflow_call_inputs.yml @@ -5,14 +5,17 @@ on: inputs: required: description: a required input + type: string required: true with_default: description: an input with default required: false + type: string default: default with_default2: description: an input with default required: false + type: string default: ${{ github.event_name }} boolean: description: an input of type boolean diff --git a/pkg/schema/gitea_schema.go b/pkg/schema/gitea_schema.go new file mode 100644 index 00000000..01ae8607 --- /dev/null +++ b/pkg/schema/gitea_schema.go @@ -0,0 +1,31 @@ +package schema + +import "slices" + +func GetGiteaWorkflowSchema() *Schema { + schema := GetWorkflowSchema() + in := schema.Definitions + schema.Definitions = map[string]Definition{} + for k, v := range in { + if v.Context != nil && 
slices.Contains(v.Context, "github") { + v.Context = append(v.Context, "gitea", "env") + } + if k == "step-if" || k == "job-if" || k == "string-strategy-context" { + v.Context = append(v.Context, "secrets") + } + schema.Definitions[k] = v + } + updateUses(schema.Definitions["workflow-job"].Mapping) + updateUses(schema.Definitions["regular-step"].Mapping) + + schema.Definitions["container-mapping"].Mapping.Properties["cmd"] = MappingProperty{ + Type: "sequence-of-non-empty-string", + } + return schema +} + +func updateUses(mapping *MappingDefinition) { + uses := mapping.Properties["uses"] + uses.Type = "string-strategy-context" + mapping.Properties["uses"] = uses +} diff --git a/pkg/schema/gitea_schema_test.go b/pkg/schema/gitea_schema_test.go new file mode 100644 index 00000000..7d4926a4 --- /dev/null +++ b/pkg/schema/gitea_schema_test.go @@ -0,0 +1,19 @@ +package schema + +import ( + "encoding/json" + "os" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGiteaSchemaFactory(t *testing.T) { + schema := GetGiteaWorkflowSchema() + _ = schema + + data, err := json.MarshalIndent(schema, "", " ") + assert.NoError(t, err) + err = os.WriteFile("gitea_workflow_schema.json", append(data, "\n"...), 0o600) + assert.NoError(t, err) +} diff --git a/pkg/schema/gitea_workflow_schema.json b/pkg/schema/gitea_workflow_schema.json new file mode 100644 index 00000000..7e4e48ec --- /dev/null +++ b/pkg/schema/gitea_workflow_schema.json @@ -0,0 +1,2678 @@ +{ + "Definitions": { + "boolean-needs-context": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "gitea", + "env" + ], + "boolean": {} + }, + "boolean-steps-context": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "secrets", + "steps", + "job", + "runner", + "env", + "hashFiles(1,255)", + "gitea", + "env" + ], + "boolean": {} + }, + "boolean-strategy-context": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "gitea", + 
"env" + ], + "boolean": {} + }, + "branch-protection-rule": { + "one-of": [ + "null", + "branch-protection-rule-mapping" + ] + }, + "branch-protection-rule-activity": { + "one-of": [ + "branch-protection-rule-activity-type", + "branch-protection-rule-activity-types" + ] + }, + "branch-protection-rule-activity-type": { + "allowed-values": [ + "created", + "edited", + "deleted" + ] + }, + "branch-protection-rule-activity-types": { + "sequence": { + "item-type": "branch-protection-rule-activity-type" + } + }, + "branch-protection-rule-mapping": { + "mapping": { + "properties": { + "types": { + "type": "branch-protection-rule-activity" + } + } + } + }, + "branch-protection-rule-string": { + "string": { + "constant": "branch_protection_rule" + } + }, + "check-run": { + "one-of": [ + "null", + "check-run-mapping" + ] + }, + "check-run-activity": { + "one-of": [ + "check-run-activity-type", + "check-run-activity-types" + ] + }, + "check-run-activity-type": { + "allowed-values": [ + "completed", + "created", + "rerequested", + "requested_action" + ] + }, + "check-run-activity-types": { + "sequence": { + "item-type": "check-run-activity-type" + } + }, + "check-run-mapping": { + "mapping": { + "properties": { + "types": { + "type": "check-run-activity" + } + } + } + }, + "check-run-string": { + "string": { + "constant": "check_run" + } + }, + "check-suite": { + "one-of": [ + "null", + "check-suite-mapping" + ] + }, + "check-suite-activity": { + "one-of": [ + "check-suite-activity-type", + "check-suite-activity-types" + ] + }, + "check-suite-activity-type": { + "allowed-values": [ + "completed" + ] + }, + "check-suite-activity-types": { + "sequence": { + "item-type": "check-suite-activity-type" + } + }, + "check-suite-mapping": { + "mapping": { + "properties": { + "types": { + "type": "check-suite-activity" + } + } + } + }, + "check-suite-string": { + "string": { + "constant": "check_suite" + } + }, + "concurrency-mapping": { + "mapping": { + "properties": { + 
"cancel-in-progress": { + "type": "boolean" + }, + "group": { + "type": "non-empty-string", + "required": true + } + } + } + }, + "container": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "gitea", + "env" + ], + "one-of": [ + "string", + "container-mapping" + ] + }, + "container-env": { + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "string-runner-context" + } + }, + "container-mapping": { + "mapping": { + "properties": { + "cmd": { + "type": "sequence-of-non-empty-string" + }, + "credentials": { + "type": "container-registry-credentials" + }, + "env": { + "type": "container-env" + }, + "image": { + "type": "non-empty-string" + }, + "options": { + "type": "non-empty-string" + }, + "ports": { + "type": "sequence-of-non-empty-string" + }, + "volumes": { + "type": "sequence-of-non-empty-string" + } + } + } + }, + "container-registry-credentials": { + "context": [ + "github", + "inputs", + "vars", + "secrets", + "env", + "gitea", + "env" + ], + "mapping": { + "properties": { + "password": { + "type": "non-empty-string" + }, + "username": { + "type": "non-empty-string" + } + } + } + }, + "create": { + "null": {} + }, + "create-string": { + "string": { + "constant": "create" + } + }, + "cron-mapping": { + "mapping": { + "properties": { + "cron": { + "type": "cron-pattern" + } + } + } + }, + "cron-pattern": { + "string": {} + }, + "delete": { + "null": {} + }, + "delete-string": { + "string": { + "constant": "delete" + } + }, + "deployment": { + "null": {} + }, + "deployment-status": { + "null": {} + }, + "deployment-status-string": { + "string": { + "constant": "deployment_status" + } + }, + "deployment-string": { + "string": { + "constant": "deployment" + } + }, + "discussion": { + "one-of": [ + "null", + "discussion-mapping" + ] + }, + "discussion-activity": { + "one-of": [ + "discussion-activity-type", + "discussion-activity-types" + ] + }, + "discussion-activity-type": { + "allowed-values": [ 
+ "created", + "edited", + "deleted", + "transferred", + "pinned", + "unpinned", + "labeled", + "unlabeled", + "locked", + "unlocked", + "category_changed", + "answered", + "unanswered" + ] + }, + "discussion-activity-types": { + "sequence": { + "item-type": "discussion-activity-type" + } + }, + "discussion-comment": { + "one-of": [ + "null", + "discussion-comment-mapping" + ] + }, + "discussion-comment-activity": { + "one-of": [ + "discussion-comment-activity-type", + "discussion-comment-activity-types" + ] + }, + "discussion-comment-activity-type": { + "allowed-values": [ + "created", + "edited", + "deleted" + ] + }, + "discussion-comment-activity-types": { + "sequence": { + "item-type": "discussion-comment-activity-type" + } + }, + "discussion-comment-mapping": { + "mapping": { + "properties": { + "types": { + "type": "discussion-comment-activity" + } + } + } + }, + "discussion-comment-string": { + "string": { + "constant": "discussion_comment" + } + }, + "discussion-mapping": { + "mapping": { + "properties": { + "types": { + "type": "discussion-activity" + } + } + } + }, + "discussion-string": { + "string": { + "constant": "discussion" + } + }, + "event-branches": { + "one-of": [ + "non-empty-string", + "sequence-of-non-empty-string" + ] + }, + "event-branches-ignore": { + "one-of": [ + "non-empty-string", + "sequence-of-non-empty-string" + ] + }, + "event-paths": { + "one-of": [ + "non-empty-string", + "sequence-of-non-empty-string" + ] + }, + "event-paths-ignore": { + "one-of": [ + "non-empty-string", + "sequence-of-non-empty-string" + ] + }, + "event-tags": { + "one-of": [ + "non-empty-string", + "sequence-of-non-empty-string" + ] + }, + "event-tags-ignore": { + "one-of": [ + "non-empty-string", + "sequence-of-non-empty-string" + ] + }, + "fork": { + "null": {} + }, + "fork-string": { + "string": { + "constant": "fork" + } + }, + "gollum": { + "null": {} + }, + "gollum-string": { + "string": { + "constant": "gollum" + } + }, + "input-type-boolean": { + 
"string": { + "constant": "boolean" + } + }, + "input-type-choice": { + "string": { + "constant": "choice" + } + }, + "input-type-environment": { + "string": { + "constant": "environment" + } + }, + "input-type-number": { + "string": { + "constant": "number" + } + }, + "input-type-string": { + "string": { + "constant": "string" + } + }, + "issue-comment": { + "one-of": [ + "null", + "issue-comment-mapping" + ] + }, + "issue-comment-activity": { + "one-of": [ + "issue-comment-activity-type", + "issue-comment-activity-types" + ] + }, + "issue-comment-activity-type": { + "allowed-values": [ + "created", + "edited", + "deleted" + ] + }, + "issue-comment-activity-types": { + "sequence": { + "item-type": "issue-comment-activity-type" + } + }, + "issue-comment-mapping": { + "mapping": { + "properties": { + "types": { + "type": "issue-comment-activity" + } + } + } + }, + "issue-comment-string": { + "string": { + "constant": "issue_comment" + } + }, + "issues": { + "one-of": [ + "null", + "issues-mapping" + ] + }, + "issues-activity": { + "one-of": [ + "issues-activity-type", + "issues-activity-types" + ] + }, + "issues-activity-type": { + "allowed-values": [ + "opened", + "edited", + "deleted", + "transferred", + "pinned", + "unpinned", + "closed", + "reopened", + "assigned", + "unassigned", + "labeled", + "unlabeled", + "locked", + "unlocked", + "milestoned", + "demilestoned" + ] + }, + "issues-activity-types": { + "sequence": { + "item-type": "issues-activity-type" + } + }, + "issues-mapping": { + "mapping": { + "properties": { + "types": { + "type": "issues-activity" + } + } + } + }, + "issues-string": { + "string": { + "constant": "issues" + } + }, + "job": { + "one-of": [ + "job-factory", + "workflow-job" + ] + }, + "job-concurrency": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "gitea", + "env" + ], + "one-of": [ + "non-empty-string", + "concurrency-mapping" + ] + }, + "job-defaults": { + "mapping": { + "properties": { + 
"run": { + "type": "job-defaults-run" + } + } + } + }, + "job-defaults-run": { + "context": [ + "github", + "inputs", + "vars", + "strategy", + "matrix", + "needs", + "env", + "gitea", + "env" + ], + "mapping": { + "properties": { + "shell": { + "type": "shell" + }, + "working-directory": { + "type": "working-directory" + } + } + } + }, + "job-env": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "secrets", + "gitea", + "env" + ], + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "string" + } + }, + "job-environment": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "gitea", + "env" + ], + "one-of": [ + "string", + "job-environment-mapping" + ] + }, + "job-environment-mapping": { + "mapping": { + "properties": { + "name": { + "type": "job-environment-name", + "required": true + }, + "url": { + "type": "string-runner-context-no-secrets" + } + } + } + }, + "job-environment-name": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "gitea", + "env" + ], + "string": {} + }, + "job-factory": { + "mapping": { + "properties": { + "cancel-timeout-minutes": { + "type": "number-strategy-context" + }, + "concurrency": { + "type": "job-concurrency" + }, + "container": { + "type": "container" + }, + "continue-on-error": { + "type": "boolean-strategy-context" + }, + "defaults": { + "type": "job-defaults" + }, + "env": { + "type": "job-env" + }, + "environment": { + "type": "job-environment" + }, + "if": { + "type": "job-if" + }, + "name": { + "type": "string-strategy-context" + }, + "needs": { + "type": "needs" + }, + "outputs": { + "type": "job-outputs" + }, + "permissions": { + "type": "permissions" + }, + "runs-on": { + "type": "runs-on", + "required": true + }, + "services": { + "type": "services" + }, + "steps": { + "type": "steps" + }, + "strategy": { + "type": "strategy" + }, + "timeout-minutes": { + "type": 
"number-strategy-context" + } + } + } + }, + "job-id": { + "string": {} + }, + "job-if": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "always(0,0)", + "failure(0,MAX)", + "cancelled(0,0)", + "success(0,MAX)", + "gitea", + "env", + "secrets" + ], + "string": { + "is-expression": true + } + }, + "job-if-result": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "always(0,0)", + "failure(0,MAX)", + "cancelled(0,0)", + "success(0,MAX)", + "gitea", + "env" + ], + "one-of": [ + "null", + "boolean", + "number", + "string", + "sequence", + "mapping" + ] + }, + "job-outputs": { + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "string-runner-context" + } + }, + "jobs": { + "mapping": { + "loose-key-type": "job-id", + "loose-value-type": "job" + } + }, + "label": { + "one-of": [ + "null", + "label-mapping" + ] + }, + "label-activity": { + "one-of": [ + "label-activity-type", + "label-activity-types" + ] + }, + "label-activity-type": { + "allowed-values": [ + "created", + "edited", + "deleted" + ] + }, + "label-activity-types": { + "sequence": { + "item-type": "label-activity-type" + } + }, + "label-mapping": { + "mapping": { + "properties": { + "types": { + "type": "label-activity" + } + } + } + }, + "label-string": { + "string": { + "constant": "label" + } + }, + "matrix": { + "mapping": { + "properties": { + "exclude": { + "type": "matrix-filter" + }, + "include": { + "type": "matrix-filter" + } + }, + "loose-key-type": "non-empty-string", + "loose-value-type": "sequence" + } + }, + "matrix-filter": { + "sequence": { + "item-type": "matrix-filter-item" + } + }, + "matrix-filter-item": { + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "any" + } + }, + "merge-group": { + "one-of": [ + "null", + "merge-group-mapping" + ] + }, + "merge-group-activity": { + "one-of": [ + "merge-group-activity-type", + "merge-group-activity-types" + ] + }, + "merge-group-activity-type": { + "allowed-values": 
[ + "checks_requested" + ] + }, + "merge-group-activity-types": { + "sequence": { + "item-type": "merge-group-activity-type" + } + }, + "merge-group-mapping": { + "mapping": { + "properties": { + "branches": { + "type": "event-branches" + }, + "branches-ignore": { + "type": "event-branches-ignore" + }, + "types": { + "type": "merge-group-activity" + } + } + } + }, + "merge-group-string": { + "string": { + "constant": "merge_group" + } + }, + "milestone": { + "one-of": [ + "null", + "milestone-mapping" + ] + }, + "milestone-activity": { + "one-of": [ + "milestone-activity-type", + "milestone-activity-types" + ] + }, + "milestone-activity-type": { + "allowed-values": [ + "created", + "closed", + "opened", + "edited", + "deleted" + ] + }, + "milestone-activity-types": { + "sequence": { + "item-type": "milestone-activity-type" + } + }, + "milestone-mapping": { + "mapping": { + "properties": { + "types": { + "type": "milestone-activity" + } + } + } + }, + "milestone-string": { + "string": { + "constant": "milestone" + } + }, + "needs": { + "one-of": [ + "sequence-of-non-empty-string", + "non-empty-string" + ] + }, + "non-empty-string": { + "string": {} + }, + "number-needs-context": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "gitea", + "env" + ], + "number": {} + }, + "number-steps-context": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "secrets", + "steps", + "job", + "runner", + "env", + "hashFiles(1,255)", + "gitea", + "env" + ], + "number": {} + }, + "number-strategy-context": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "gitea", + "env" + ], + "number": {} + }, + "on": { + "one-of": [ + "string", + "sequence", + "on-mapping" + ] + }, + "on-mapping": { + "mapping": { + "properties": { + "workflow_call": { + "type": "workflow-call" + } + }, + "loose-key-type": "non-empty-string", + "loose-value-type": "any" + } + }, + "on-mapping-strict": { + "mapping": { + 
"properties": { + "branch_protection_rule": { + "type": "branch-protection-rule" + }, + "check_run": { + "type": "check-run" + }, + "check_suite": { + "type": "check-suite" + }, + "create": { + "type": "create" + }, + "delete": { + "type": "delete" + }, + "deployment": { + "type": "deployment" + }, + "deployment_status": { + "type": "deployment-status" + }, + "discussion": { + "type": "discussion" + }, + "discussion_comment": { + "type": "discussion-comment" + }, + "fork": { + "type": "fork" + }, + "gollum": { + "type": "gollum" + }, + "issue_comment": { + "type": "issue-comment" + }, + "issues": { + "type": "issues" + }, + "label": { + "type": "label" + }, + "merge_group": { + "type": "merge-group" + }, + "milestone": { + "type": "milestone" + }, + "page_build": { + "type": "page-build" + }, + "project": { + "type": "project" + }, + "project_card": { + "type": "project-card" + }, + "project_column": { + "type": "project-column" + }, + "public": { + "type": "public" + }, + "pull_request": { + "type": "pull-request" + }, + "pull_request_comment": { + "type": "pull-request-comment" + }, + "pull_request_review": { + "type": "pull-request-review" + }, + "pull_request_review_comment": { + "type": "pull-request-review-comment" + }, + "pull_request_target": { + "type": "pull-request-target" + }, + "push": { + "type": "push" + }, + "registry_package": { + "type": "registry-package" + }, + "release": { + "type": "release" + }, + "repository_dispatch": { + "type": "repository-dispatch" + }, + "schedule": { + "type": "schedule" + }, + "status": { + "type": "status" + }, + "watch": { + "type": "watch" + }, + "workflow_call": { + "type": "workflow-call" + }, + "workflow_dispatch": { + "type": "workflow-dispatch" + }, + "workflow_run": { + "type": "workflow-run" + } + } + } + }, + "on-sequence-strict": { + "sequence": { + "item-type": "on-string-strict" + } + }, + "on-strict": { + "one-of": [ + "on-string-strict", + "on-sequence-strict", + "on-mapping-strict" + ] + }, + 
"on-string-strict": { + "one-of": [ + "branch-protection-rule-string", + "check-run-string", + "check-suite-string", + "create-string", + "delete-string", + "deployment-string", + "deployment-status-string", + "discussion-string", + "discussion-comment-string", + "fork-string", + "gollum-string", + "issue-comment-string", + "issues-string", + "label-string", + "merge-group-string", + "milestone-string", + "page-build-string", + "project-string", + "project-card-string", + "project-column-string", + "public-string", + "pull-request-string", + "pull-request-comment-string", + "pull-request-review-string", + "pull-request-review-comment-string", + "pull-request-target-string", + "push-string", + "registry-package-string", + "release-string", + "repository-dispatch-string", + "schedule-string", + "status-string", + "watch-string", + "workflow-call-string", + "workflow-dispatch-string", + "workflow-run-string" + ] + }, + "page-build": { + "null": {} + }, + "page-build-string": { + "string": { + "constant": "page_build" + } + }, + "permission-level-any": { + "one-of": [ + "permission-level-read", + "permission-level-write", + "permission-level-no-access" + ] + }, + "permission-level-no-access": { + "string": { + "constant": "none" + } + }, + "permission-level-read": { + "string": { + "constant": "read" + } + }, + "permission-level-read-or-no-access": { + "one-of": [ + "permission-level-read", + "permission-level-no-access" + ] + }, + "permission-level-shorthand-read-all": { + "string": { + "constant": "read-all" + } + }, + "permission-level-shorthand-write-all": { + "string": { + "constant": "write-all" + } + }, + "permission-level-write": { + "string": { + "constant": "write" + } + }, + "permission-level-write-or-no-access": { + "one-of": [ + "permission-level-write", + "permission-level-no-access" + ] + }, + "permissions": { + "one-of": [ + "permissions-mapping", + "permission-level-shorthand-read-all", + "permission-level-shorthand-write-all" + ] + }, + 
"permissions-mapping": { + "mapping": { + "properties": { + "actions": { + "type": "permission-level-any" + }, + "attestations": { + "type": "permission-level-any" + }, + "checks": { + "type": "permission-level-any" + }, + "contents": { + "type": "permission-level-any" + }, + "deployments": { + "type": "permission-level-any" + }, + "discussions": { + "type": "permission-level-any" + }, + "id-token": { + "type": "permission-level-write-or-no-access" + }, + "issues": { + "type": "permission-level-any" + }, + "models": { + "type": "permission-level-read-or-no-access" + }, + "packages": { + "type": "permission-level-any" + }, + "pages": { + "type": "permission-level-any" + }, + "pull-requests": { + "type": "permission-level-any" + }, + "repository-projects": { + "type": "permission-level-any" + }, + "security-events": { + "type": "permission-level-any" + }, + "statuses": { + "type": "permission-level-any" + } + } + } + }, + "project": { + "one-of": [ + "null", + "project-mapping" + ] + }, + "project-activity": { + "one-of": [ + "project-activity-type", + "project-activity-types" + ] + }, + "project-activity-type": { + "allowed-values": [ + "created", + "closed", + "reopened", + "edited", + "deleted" + ] + }, + "project-activity-types": { + "sequence": { + "item-type": "project-activity-type" + } + }, + "project-card": { + "one-of": [ + "null", + "project-card-mapping" + ] + }, + "project-card-activity": { + "one-of": [ + "project-card-activity-type", + "project-card-activity-types" + ] + }, + "project-card-activity-type": { + "allowed-values": [ + "created", + "moved", + "converted", + "edited", + "deleted" + ] + }, + "project-card-activity-types": { + "sequence": { + "item-type": "project-card-activity-type" + } + }, + "project-card-mapping": { + "mapping": { + "properties": { + "types": { + "type": "project-card-activity" + } + } + } + }, + "project-card-string": { + "string": { + "constant": "project_card" + } + }, + "project-column": { + "one-of": [ + "null", + 
"project-column-mapping" + ] + }, + "project-column-activity": { + "one-of": [ + "project-column-activity-type", + "project-column-activity-types" + ] + }, + "project-column-activity-type": { + "allowed-values": [ + "created", + "updated", + "moved", + "deleted" + ] + }, + "project-column-activity-types": { + "sequence": { + "item-type": "project-column-activity-type" + } + }, + "project-column-mapping": { + "mapping": { + "properties": { + "types": { + "type": "project-column-activity" + } + } + } + }, + "project-column-string": { + "string": { + "constant": "project_column" + } + }, + "project-mapping": { + "mapping": { + "properties": { + "types": { + "type": "project-activity" + } + } + } + }, + "project-string": { + "string": { + "constant": "project" + } + }, + "public": { + "null": {} + }, + "public-string": { + "string": { + "constant": "public" + } + }, + "pull-request": { + "one-of": [ + "null", + "pull-request-mapping" + ] + }, + "pull-request-activity": { + "one-of": [ + "pull-request-activity-type", + "pull-request-activity-types" + ] + }, + "pull-request-activity-type": { + "allowed-values": [ + "assigned", + "unassigned", + "labeled", + "unlabeled", + "opened", + "edited", + "closed", + "reopened", + "synchronize", + "converted_to_draft", + "ready_for_review", + "locked", + "unlocked", + "review_requested", + "review_request_removed", + "auto_merge_enabled", + "auto_merge_disabled" + ] + }, + "pull-request-activity-types": { + "sequence": { + "item-type": "pull-request-activity-type" + } + }, + "pull-request-comment": { + "one-of": [ + "null", + "issue-comment-mapping" + ] + }, + "pull-request-comment-string": { + "string": { + "constant": "pull_request_comment" + } + }, + "pull-request-mapping": { + "mapping": { + "properties": { + "branches": { + "type": "event-branches" + }, + "branches-ignore": { + "type": "event-branches-ignore" + }, + "paths": { + "type": "event-paths" + }, + "paths-ignore": { + "type": "event-paths-ignore" + }, + "types": { + 
"type": "pull-request-activity" + } + } + } + }, + "pull-request-review": { + "one-of": [ + "null", + "pull-request-review-mapping" + ] + }, + "pull-request-review-activity": { + "one-of": [ + "pull-request-review-activity-type", + "pull-request-review-activity-types" + ] + }, + "pull-request-review-activity-type": { + "allowed-values": [ + "submitted", + "edited", + "dismissed" + ] + }, + "pull-request-review-activity-types": { + "sequence": { + "item-type": "pull-request-review-activity-type" + } + }, + "pull-request-review-comment": { + "one-of": [ + "null", + "pull-request-review-comment-mapping" + ] + }, + "pull-request-review-comment-activity": { + "one-of": [ + "pull-request-review-comment-activity-type", + "pull-request-review-comment-activity-types" + ] + }, + "pull-request-review-comment-activity-type": { + "allowed-values": [ + "created", + "edited", + "deleted" + ] + }, + "pull-request-review-comment-activity-types": { + "sequence": { + "item-type": "pull-request-review-comment-activity-type" + } + }, + "pull-request-review-comment-mapping": { + "mapping": { + "properties": { + "types": { + "type": "pull-request-review-comment-activity" + } + } + } + }, + "pull-request-review-comment-string": { + "string": { + "constant": "pull_request_review_comment" + } + }, + "pull-request-review-mapping": { + "mapping": { + "properties": { + "types": { + "type": "pull-request-review-activity" + } + } + } + }, + "pull-request-review-string": { + "string": { + "constant": "pull_request_review" + } + }, + "pull-request-string": { + "string": { + "constant": "pull_request" + } + }, + "pull-request-target": { + "one-of": [ + "null", + "pull-request-target-mapping" + ] + }, + "pull-request-target-activity": { + "one-of": [ + "pull-request-target-activity-type", + "pull-request-target-activity-types" + ] + }, + "pull-request-target-activity-type": { + "allowed-values": [ + "assigned", + "unassigned", + "labeled", + "unlabeled", + "opened", + "edited", + "closed", + 
"reopened", + "synchronize", + "converted_to_draft", + "ready_for_review", + "locked", + "unlocked", + "review_requested", + "review_request_removed", + "auto_merge_enabled", + "auto_merge_disabled" + ] + }, + "pull-request-target-activity-types": { + "sequence": { + "item-type": "pull-request-target-activity-type" + } + }, + "pull-request-target-mapping": { + "mapping": { + "properties": { + "branches": { + "type": "event-branches" + }, + "branches-ignore": { + "type": "event-branches-ignore" + }, + "paths": { + "type": "event-paths" + }, + "paths-ignore": { + "type": "event-paths-ignore" + }, + "types": { + "type": "pull-request-target-activity" + } + } + } + }, + "pull-request-target-string": { + "string": { + "constant": "pull_request_target" + } + }, + "push": { + "one-of": [ + "null", + "push-mapping" + ] + }, + "push-mapping": { + "mapping": { + "properties": { + "branches": { + "type": "event-branches" + }, + "branches-ignore": { + "type": "event-branches-ignore" + }, + "paths": { + "type": "event-paths" + }, + "paths-ignore": { + "type": "event-paths-ignore" + }, + "tags": { + "type": "event-tags" + }, + "tags-ignore": { + "type": "event-tags-ignore" + } + } + } + }, + "push-string": { + "string": { + "constant": "push" + } + }, + "registry-package": { + "one-of": [ + "null", + "registry-package-mapping" + ] + }, + "registry-package-activity": { + "one-of": [ + "registry-package-activity-type", + "registry-package-activity-types" + ] + }, + "registry-package-activity-type": { + "allowed-values": [ + "published", + "updated" + ] + }, + "registry-package-activity-types": { + "sequence": { + "item-type": "registry-package-activity-type" + } + }, + "registry-package-mapping": { + "mapping": { + "properties": { + "types": { + "type": "registry-package-activity" + } + } + } + }, + "registry-package-string": { + "string": { + "constant": "registry_package" + } + }, + "regular-step": { + "mapping": { + "properties": { + "continue-on-error": { + "type": 
"step-continue-on-error" + }, + "env": { + "type": "step-env" + }, + "id": { + "type": "step-id" + }, + "if": { + "type": "step-if" + }, + "name": { + "type": "step-name" + }, + "timeout-minutes": { + "type": "step-timeout-minutes" + }, + "uses": { + "type": "string-strategy-context", + "required": true + }, + "with": { + "type": "step-with" + } + } + } + }, + "release": { + "one-of": [ + "null", + "release-mapping" + ] + }, + "release-activity": { + "one-of": [ + "release-activity-type", + "release-activity-types" + ] + }, + "release-activity-type": { + "allowed-values": [ + "published", + "unpublished", + "created", + "edited", + "deleted", + "prereleased", + "released" + ] + }, + "release-activity-types": { + "sequence": { + "item-type": "release-activity-type" + } + }, + "release-mapping": { + "mapping": { + "properties": { + "types": { + "type": "release-activity" + } + } + } + }, + "release-string": { + "string": { + "constant": "release" + } + }, + "repository-dispatch": { + "one-of": [ + "null", + "repository-dispatch-mapping" + ] + }, + "repository-dispatch-mapping": { + "mapping": { + "properties": { + "types": { + "type": "sequence-of-non-empty-string" + } + } + } + }, + "repository-dispatch-string": { + "string": { + "constant": "branch_protection_rule" + } + }, + "run-name": { + "context": [ + "github", + "inputs", + "vars", + "gitea", + "env" + ], + "string": {} + }, + "run-step": { + "mapping": { + "properties": { + "continue-on-error": { + "type": "step-continue-on-error" + }, + "env": { + "type": "step-env" + }, + "id": { + "type": "step-id" + }, + "if": { + "type": "step-if" + }, + "name": { + "type": "step-name" + }, + "run": { + "type": "string-steps-context", + "required": true + }, + "shell": { + "type": "shell" + }, + "timeout-minutes": { + "type": "step-timeout-minutes" + }, + "working-directory": { + "type": "string-steps-context" + } + } + } + }, + "runs-on": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + 
"matrix", + "gitea", + "env" + ], + "one-of": [ + "non-empty-string", + "sequence-of-non-empty-string", + "runs-on-mapping" + ] + }, + "runs-on-labels": { + "one-of": [ + "non-empty-string", + "sequence-of-non-empty-string" + ] + }, + "runs-on-mapping": { + "mapping": { + "properties": { + "group": { + "type": "non-empty-string" + }, + "labels": { + "type": "runs-on-labels" + } + } + } + }, + "scalar-needs-context": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "gitea", + "env" + ], + "one-of": [ + "string", + "boolean", + "number" + ] + }, + "scalar-needs-context-with-secrets": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "secrets", + "strategy", + "matrix", + "gitea", + "env" + ], + "one-of": [ + "string", + "boolean", + "number" + ] + }, + "schedule": { + "sequence": { + "item-type": "cron-mapping" + } + }, + "schedule-string": { + "string": { + "constant": "schedule" + } + }, + "sequence-of-non-empty-string": { + "sequence": { + "item-type": "non-empty-string" + } + }, + "services": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "gitea", + "env" + ], + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "services-container" + } + }, + "services-container": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "gitea", + "env" + ], + "one-of": [ + "non-empty-string", + "container-mapping" + ] + }, + "shell": { + "string": {} + }, + "status": { + "null": {} + }, + "status-string": { + "string": { + "constant": "status" + } + }, + "step-continue-on-error": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "secrets", + "steps", + "job", + "runner", + "env", + "hashFiles(1,255)", + "gitea", + "env" + ], + "boolean": {} + }, + "step-env": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "secrets", + "steps", + "job", + 
"runner", + "env", + "hashFiles(1,255)", + "gitea", + "env" + ], + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "string" + } + }, + "step-id": { + "string": {} + }, + "step-if": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "steps", + "job", + "runner", + "env", + "always(0,0)", + "failure(0,0)", + "cancelled(0,0)", + "success(0,0)", + "hashFiles(1,255)", + "gitea", + "env", + "secrets" + ], + "string": { + "is-expression": true + } + }, + "step-if-result": { + "context": [ + "github", + "inputs", + "vars", + "strategy", + "matrix", + "steps", + "job", + "runner", + "env", + "always(0,0)", + "failure(0,0)", + "cancelled(0,0)", + "success(0,0)", + "hashFiles(1,255)", + "gitea", + "env" + ], + "one-of": [ + "null", + "boolean", + "number", + "string", + "sequence", + "mapping" + ] + }, + "step-name": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "secrets", + "steps", + "job", + "runner", + "env", + "hashFiles(1,255)", + "gitea", + "env" + ], + "string": {} + }, + "step-timeout-minutes": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "secrets", + "steps", + "job", + "runner", + "env", + "hashFiles(1,255)", + "gitea", + "env" + ], + "number": {} + }, + "step-uses": { + "string": {} + }, + "step-with": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "secrets", + "steps", + "job", + "runner", + "env", + "hashFiles(1,255)", + "gitea", + "env" + ], + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "string" + } + }, + "steps": { + "sequence": { + "item-type": "steps-item" + } + }, + "steps-item": { + "one-of": [ + "run-step", + "regular-step" + ] + }, + "strategy": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "gitea", + "env" + ], + "mapping": { + "properties": { + "fail-fast": { + "type": "boolean" + }, + "matrix": { + "type": 
"matrix" + }, + "max-parallel": { + "type": "number" + } + } + } + }, + "string-needs-context": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "gitea", + "env" + ], + "string": {} + }, + "string-runner-context": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "secrets", + "steps", + "job", + "runner", + "env", + "gitea", + "env" + ], + "string": {} + }, + "string-runner-context-no-secrets": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "steps", + "job", + "runner", + "env", + "gitea", + "env" + ], + "string": {} + }, + "string-steps-context": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "secrets", + "steps", + "job", + "runner", + "env", + "hashFiles(1,255)", + "gitea", + "env" + ], + "string": {} + }, + "string-strategy-context": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix", + "gitea", + "env", + "secrets" + ], + "string": {} + }, + "watch": { + "one-of": [ + "null", + "watch-mapping" + ] + }, + "watch-activity": { + "one-of": [ + "watch-activity-type", + "watch-activity-types" + ] + }, + "watch-activity-type": { + "allowed-values": [ + "started" + ] + }, + "watch-activity-types": { + "sequence": { + "item-type": "watch-activity-type" + } + }, + "watch-mapping": { + "mapping": { + "properties": { + "types": { + "type": "watch-activity" + } + } + } + }, + "watch-string": { + "string": { + "constant": "watch" + } + }, + "workflow-call": { + "one-of": [ + "null", + "workflow-call-mapping" + ] + }, + "workflow-call-input-default": { + "context": [ + "github", + "inputs", + "vars", + "gitea", + "env" + ], + "one-of": [ + "string", + "boolean", + "number" + ] + }, + "workflow-call-input-definition": { + "mapping": { + "properties": { + "default": { + "type": "workflow-call-input-default" + }, + "description": { + "type": "string" + }, + "required": { + "type": "boolean" + }, + 
"type": { + "type": "workflow-call-input-type", + "required": true + } + } + } + }, + "workflow-call-input-type": { + "one-of": [ + "input-type-string", + "input-type-boolean", + "input-type-number" + ] + }, + "workflow-call-inputs": { + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "workflow-call-input-definition" + } + }, + "workflow-call-mapping": { + "mapping": { + "properties": { + "inputs": { + "type": "workflow-call-inputs" + }, + "outputs": { + "type": "workflow-call-outputs" + }, + "secrets": { + "type": "workflow-call-secrets" + } + } + } + }, + "workflow-call-output-definition": { + "mapping": { + "properties": { + "description": { + "type": "string" + }, + "value": { + "type": "workflow-output-context", + "required": true + } + } + } + }, + "workflow-call-output-name": { + "string": {} + }, + "workflow-call-outputs": { + "mapping": { + "loose-key-type": "workflow-call-output-name", + "loose-value-type": "workflow-call-output-definition" + } + }, + "workflow-call-secret-definition": { + "one-of": [ + "null", + "workflow-call-secret-mapping-definition" + ] + }, + "workflow-call-secret-mapping-definition": { + "mapping": { + "properties": { + "description": { + "type": "string" + }, + "required": { + "type": "boolean" + } + } + } + }, + "workflow-call-secret-name": { + "string": {} + }, + "workflow-call-secrets": { + "mapping": { + "loose-key-type": "workflow-call-secret-name", + "loose-value-type": "workflow-call-secret-definition" + } + }, + "workflow-call-string": { + "string": { + "constant": "workflow_call" + } + }, + "workflow-concurrency": { + "context": [ + "github", + "inputs", + "vars", + "gitea", + "env" + ], + "one-of": [ + "string", + "concurrency-mapping" + ] + }, + "workflow-defaults": { + "mapping": { + "properties": { + "run": { + "type": "workflow-defaults-run" + } + } + } + }, + "workflow-defaults-run": { + "mapping": { + "properties": { + "shell": { + "type": "shell" + }, + "working-directory": { + "type": 
"working-directory" + } + } + } + }, + "workflow-description": { + "string": {} + }, + "workflow-dispatch": { + "one-of": [ + "null", + "workflow-dispatch-mapping" + ] + }, + "workflow-dispatch-input": { + "mapping": { + "properties": { + "default": { + "type": "workflow-dispatch-input-default" + }, + "description": { + "type": "string" + }, + "options": { + "type": "sequence-of-non-empty-string" + }, + "required": { + "type": "boolean" + }, + "type": { + "type": "workflow-dispatch-input-type" + } + } + } + }, + "workflow-dispatch-input-default": { + "one-of": [ + "string", + "boolean", + "number" + ] + }, + "workflow-dispatch-input-name": { + "string": {} + }, + "workflow-dispatch-input-type": { + "one-of": [ + "input-type-string", + "input-type-boolean", + "input-type-number", + "input-type-environment", + "input-type-choice" + ] + }, + "workflow-dispatch-inputs": { + "mapping": { + "loose-key-type": "workflow-dispatch-input-name", + "loose-value-type": "workflow-dispatch-input" + } + }, + "workflow-dispatch-mapping": { + "mapping": { + "properties": { + "inputs": { + "type": "workflow-dispatch-inputs" + } + } + } + }, + "workflow-dispatch-string": { + "string": { + "constant": "workflow_dispatch" + } + }, + "workflow-env": { + "context": [ + "github", + "inputs", + "vars", + "secrets", + "gitea", + "env" + ], + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "string" + } + }, + "workflow-job": { + "mapping": { + "properties": { + "concurrency": { + "type": "job-concurrency" + }, + "if": { + "type": "job-if" + }, + "name": { + "type": "string-strategy-context" + }, + "needs": { + "type": "needs" + }, + "permissions": { + "type": "permissions" + }, + "secrets": { + "type": "workflow-job-secrets" + }, + "strategy": { + "type": "strategy" + }, + "uses": { + "type": "string-strategy-context", + "required": true + }, + "with": { + "type": "workflow-job-with" + } + } + } + }, + "workflow-job-secrets": { + "one-of": [ + 
"workflow-job-secrets-mapping", + "workflow-job-secrets-inherit" + ] + }, + "workflow-job-secrets-inherit": { + "string": { + "constant": "inherit" + } + }, + "workflow-job-secrets-mapping": { + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "scalar-needs-context-with-secrets" + } + }, + "workflow-job-with": { + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "scalar-needs-context" + } + }, + "workflow-name": { + "string": {} + }, + "workflow-output-context": { + "context": [ + "github", + "inputs", + "vars", + "jobs", + "gitea", + "env" + ], + "string": {} + }, + "workflow-root": { + "mapping": { + "properties": { + "concurrency": { + "type": "workflow-concurrency" + }, + "defaults": { + "type": "workflow-defaults" + }, + "description": { + "type": "workflow-description" + }, + "env": { + "type": "workflow-env" + }, + "jobs": { + "type": "jobs", + "required": true + }, + "name": { + "type": "workflow-name" + }, + "on": { + "type": "on" + }, + "permissions": { + "type": "permissions" + }, + "run-name": { + "type": "run-name" + } + } + } + }, + "workflow-root-strict": { + "mapping": { + "properties": { + "concurrency": { + "type": "workflow-concurrency" + }, + "defaults": { + "type": "workflow-defaults" + }, + "description": { + "type": "workflow-description" + }, + "env": { + "type": "workflow-env" + }, + "jobs": { + "type": "jobs", + "required": true + }, + "name": { + "type": "workflow-name" + }, + "on": { + "type": "on-strict", + "required": true + }, + "permissions": { + "type": "permissions" + }, + "run-name": { + "type": "run-name" + } + } + } + }, + "workflow-run": { + "one-of": [ + "null", + "workflow-run-mapping" + ] + }, + "workflow-run-activity": { + "one-of": [ + "workflow-run-activity-type", + "workflow-run-activity-types" + ] + }, + "workflow-run-activity-type": { + "allowed-values": [ + "requested", + "completed", + "in_progress" + ] + }, + "workflow-run-activity-types": { + "sequence": { + 
"item-type": "workflow-run-activity-type" + } + }, + "workflow-run-mapping": { + "mapping": { + "properties": { + "branches": { + "type": "event-branches" + }, + "branches-ignore": { + "type": "event-branches-ignore" + }, + "types": { + "type": "workflow-run-activity" + }, + "workflows": { + "type": "workflow-run-workflows" + } + } + } + }, + "workflow-run-string": { + "string": { + "constant": "workflow_run" + } + }, + "workflow-run-workflows": { + "one-of": [ + "non-empty-string", + "sequence-of-non-empty-string" + ] + }, + "working-directory": { + "string": {} + } + } +} diff --git a/pkg/schema/schema.go b/pkg/schema/schema.go index 5c26559e..efde10d6 100644 --- a/pkg/schema/schema.go +++ b/pkg/schema/schema.go @@ -10,7 +10,7 @@ import ( "strconv" "strings" - "github.com/rhysd/actionlint" + exprparser "github.com/actions-oss/act-cli/internal/expr" "gopkg.in/yaml.v3" ) @@ -22,6 +22,87 @@ var actionSchema string var functions = regexp.MustCompile(`^([a-zA-Z0-9_]+)\(([0-9]+),([0-9]+|MAX)\)$`) +type ValidationKind int + +const ( + ValidationKindFatal ValidationKind = iota + ValidationKindWarning + ValidationKindInvalidProperty + ValidationKindMismatched + ValidationKindMissingProperty +) + +type Location struct { + Line int + Column int +} + +type ValidationError struct { + Kind ValidationKind + Location + Message string +} + +func (e ValidationError) Error() string { + return fmt.Sprintf("Line: %d Column %d: %s", e.Line, e.Column, e.Message) +} + +type ValidationErrorCollection struct { + Errors []ValidationError + Collections []ValidationErrorCollection +} + +func indent(builder *strings.Builder, in string) { + for _, v := range strings.Split(in, "\n") { + if v != "" { + builder.WriteString(" ") + builder.WriteString(v) + } + builder.WriteString("\n") + } +} + +func (c ValidationErrorCollection) Error() string { + var builder strings.Builder + for _, e := range c.Errors { + if builder.Len() > 0 { + builder.WriteString("\n") + } + builder.WriteString(e.Error()) + } 
+ for _, e := range c.Collections { + if builder.Len() > 0 { + builder.WriteString("\n") + } + indent(&builder, e.Error()) + } + return builder.String() +} + +func (c *ValidationErrorCollection) AddError(err ValidationError) { + c.Errors = append(c.Errors, err) +} + +func AsValidationErrorCollection(err error) *ValidationErrorCollection { + if col, ok := err.(ValidationErrorCollection); ok { + return &col + } + if col, ok := err.(*ValidationErrorCollection); ok { + return col + } + if e, ok := err.(ValidationError); ok { + return &ValidationErrorCollection{ + Errors: []ValidationError{e}, + } + } + if e, ok := err.(*ValidationError); ok { + return &ValidationErrorCollection{ + Errors: []ValidationError{*e}, + } + } + return nil +} + type Schema struct { Definitions map[string]Definition } @@ -50,26 +131,26 @@ func (s *Schema) GetDefinition(name string) Definition { } type Definition struct { - Context []string - Mapping *MappingDefinition - Sequence *SequenceDefinition - OneOf *[]string `json:"one-of"` - AllowedValues *[]string `json:"allowed-values"` - String *StringDefinition - Number *NumberDefinition - Boolean *BooleanDefinition - Null *NullDefinition + Context []string `json:"context,omitempty"` + Mapping *MappingDefinition `json:"mapping,omitempty"` + Sequence *SequenceDefinition `json:"sequence,omitempty"` + OneOf *[]string `json:"one-of,omitempty"` + AllowedValues *[]string `json:"allowed-values,omitempty"` + String *StringDefinition `json:"string,omitempty"` + Number *NumberDefinition `json:"number,omitempty"` + Boolean *BooleanDefinition `json:"boolean,omitempty"` + Null *NullDefinition `json:"null,omitempty"` } type MappingDefinition struct { - Properties map[string]MappingProperty - LooseKeyType string `json:"loose-key-type"` - LooseValueType string `json:"loose-value-type"` + Properties map[string]MappingProperty `json:"properties,omitempty"` + LooseKeyType string `json:"loose-key-type,omitempty"` + LooseValueType string 
`json:"loose-value-type,omitempty"` } type MappingProperty struct { - Type string - Required bool + Type string `json:"type,omitempty"` + Required bool `json:"required,omitempty"` } func (s *MappingProperty) UnmarshalJSON(data []byte) error { @@ -85,8 +166,8 @@ type SequenceDefinition struct { } type StringDefinition struct { - Constant string - IsExpression bool `json:"is-expression"` + Constant string `json:"constant,omitempty"` + IsExpression bool `json:"is-expression,omitempty"` } type NumberDefinition struct { @@ -111,23 +192,22 @@ func GetActionSchema() *Schema { } type Node struct { - Definition string - Schema *Schema - Context []string + RestrictEval bool + Definition string + Schema *Schema + Context []string } type FunctionInfo struct { - name string - min int - max int + Name string + Min int + Max int } -func (s *Node) checkSingleExpression(exprNode actionlint.ExprNode) error { +func (s *Node) checkSingleExpression(exprNode exprparser.Node) error { if len(s.Context) == 0 { - switch exprNode.Token().Kind { - case actionlint.TokenKindInt: - case actionlint.TokenKindFloat: - case actionlint.TokenKindString: + switch exprNode.(type) { + case *exprparser.ValueNode: return nil default: return fmt.Errorf("expressions are not allowed here") @@ -137,42 +217,44 @@ func (s *Node) checkSingleExpression(exprNode actionlint.ExprNode) error { funcs := s.GetFunctions() var err error - actionlint.VisitExprNode(exprNode, func(node, _ actionlint.ExprNode, entering bool) { - if funcCallNode, ok := node.(*actionlint.FuncCallNode); entering && ok { - for _, v := range *funcs { - if strings.EqualFold(funcCallNode.Callee, v.name) { - if v.min > len(funcCallNode.Args) { - err = errors.Join(err, fmt.Errorf("missing parameters for %s expected >= %v got %v", funcCallNode.Callee, v.min, len(funcCallNode.Args))) + exprparser.VisitNode(exprNode, func(node exprparser.Node) { + if funcCallNode, ok := node.(*exprparser.FunctionNode); ok { + for _, v := range funcs { + if 
strings.EqualFold(funcCallNode.Name, v.Name) { + if v.Min > len(funcCallNode.Args) { + err = errors.Join(err, fmt.Errorf("missing parameters for %s expected >= %v got %v", funcCallNode.Name, v.Min, len(funcCallNode.Args))) } - if v.max < len(funcCallNode.Args) { - err = errors.Join(err, fmt.Errorf("too many parameters for %s expected <= %v got %v", funcCallNode.Callee, v.max, len(funcCallNode.Args))) + if v.Max < len(funcCallNode.Args) { + err = errors.Join(err, fmt.Errorf("too many parameters for %s expected <= %v got %v", funcCallNode.Name, v.Max, len(funcCallNode.Args))) } return } } - err = errors.Join(err, fmt.Errorf("unknown Function Call %s", funcCallNode.Callee)) + err = errors.Join(err, fmt.Errorf("unknown Function Call %s", funcCallNode.Name)) } - if varNode, ok := node.(*actionlint.VariableNode); entering && ok { - for _, v := range s.Context { - if strings.EqualFold(varNode.Name, v) { - return + if varNode, ok := node.(*exprparser.ValueNode); ok && varNode.Kind == exprparser.TokenKindNamedValue { + if str, ok := varNode.Value.(string); ok { + for _, v := range s.Context { + if strings.EqualFold(str, v) { + return + } } } - err = errors.Join(err, fmt.Errorf("unknown Variable Access %s", varNode.Name)) + err = errors.Join(err, fmt.Errorf("unknown Variable Access %v", varNode.Value)) } }) return err } -func (s *Node) GetFunctions() *[]FunctionInfo { - funcs := &[]FunctionInfo{} - AddFunction(funcs, "contains", 2, 2) - AddFunction(funcs, "endsWith", 2, 2) - AddFunction(funcs, "format", 1, 255) - AddFunction(funcs, "join", 1, 2) - AddFunction(funcs, "startsWith", 2, 2) - AddFunction(funcs, "toJson", 1, 1) - AddFunction(funcs, "fromJson", 1, 1) +func (s *Node) GetFunctions() []FunctionInfo { + funcs := []FunctionInfo{} + AddFunction(&funcs, "contains", 2, 2) + AddFunction(&funcs, "endsWith", 2, 2) + AddFunction(&funcs, "format", 1, 255) + AddFunction(&funcs, "join", 1, 2) + AddFunction(&funcs, "startsWith", 2, 2) + AddFunction(&funcs, "toJson", 1, 1) + 
AddFunction(&funcs, "fromJson", 1, 1) for _, v := range s.Context { i := strings.Index(v, "(") if i == -1 { @@ -189,17 +271,32 @@ func (s *Node) GetFunctions() *[]FunctionInfo { } else { maxParameters, _ = strconv.ParseInt(maxParametersRaw, 10, 32) } - *funcs = append(*funcs, FunctionInfo{ - name: functionName, - min: int(minParameters), - max: int(maxParameters), + funcs = append(funcs, FunctionInfo{ + Name: functionName, + Min: int(minParameters), + Max: int(maxParameters), }) } } return funcs } +func exprEnd(expr string) int { + var inQuotes bool + for i, v := range expr { + if v == '\'' { + inQuotes = !inQuotes + } else if !inQuotes && i+1 < len(expr) && expr[i:i+2] == "}}" { + return i + } + } + return -1 +} + func (s *Node) checkExpression(node *yaml.Node) (bool, error) { + if s.RestrictEval { + return false, nil + } val := node.Value hadExpr := false var err error @@ -211,26 +308,32 @@ func (s *Node) checkExpression(node *yaml.Node) (bool, error) { } hadExpr = true - parser := actionlint.NewExprParser() - lexer := actionlint.NewExprLexer(val) - exprNode, parseErr := parser.Parse(lexer) + j := exprEnd(val) + + exprNode, parseErr := exprparser.Parse(val[:j]) if parseErr != nil { - err = errors.Join(err, fmt.Errorf("%sFailed to parse: %s", formatLocation(node), parseErr.Message)) + err = errors.Join(err, ValidationError{ + Location: toLocation(node), + Message: fmt.Sprintf("failed to parse: %s", parseErr.Error()), + }) continue } - val = val[lexer.Offset():] + val = val[j+2:] cerr := s.checkSingleExpression(exprNode) if cerr != nil { - err = errors.Join(err, fmt.Errorf("%s%w", formatLocation(node), cerr)) + err = errors.Join(err, ValidationError{ + Location: toLocation(node), + Message: cerr.Error(), + }) } } } func AddFunction(funcs *[]FunctionInfo, s string, i1, i2 int) { *funcs = append(*funcs, FunctionInfo{ - name: s, - min: i1, - max: i2, + Name: s, + Min: i1, + Max: i2, }) } @@ -238,9 +341,6 @@ func (s *Node) UnmarshalYAML(node *yaml.Node) error { if node 
!= nil && node.Kind == yaml.DocumentNode { return s.UnmarshalYAML(node.Content[0]) } - if node.Kind == yaml.AliasNode { - node = node.Alias - } def := s.Schema.GetDefinition(s.Definition) if s.Context == nil { s.Context = def.Context @@ -261,8 +361,8 @@ func (s *Node) UnmarshalYAML(node *yaml.Node) error { return s.checkOneOf(def, node) } - if node.Kind != yaml.ScalarNode { - return fmt.Errorf("%sExpected a scalar got %v", formatLocation(node), getStringKind(node.Kind)) + if err := assertKind(node, yaml.ScalarNode); err != nil { + return err } if def.String != nil { @@ -280,50 +380,99 @@ func (s *Node) UnmarshalYAML(node *yaml.Node) error { return nil } } - return fmt.Errorf("%sExpected one of %s got %s", formatLocation(node), strings.Join(*def.AllowedValues, ","), s) + return ValidationError{ + Location: toLocation(node), + Message: fmt.Sprintf("expected one of %s got %s", strings.Join(*def.AllowedValues, ","), s), + } } else if def.Null != nil { var myNull *byte - return node.Decode(&myNull) + if err := node.Decode(&myNull); err != nil { + return err + } + if myNull != nil { + return ValidationError{ + Location: toLocation(node), + Message: "invalid Null", + } + } + return nil } return errors.ErrUnsupported } func (s *Node) checkString(node *yaml.Node, def Definition) error { + // caller checks node type val := node.Value if def.String.Constant != "" && def.String.Constant != val { - return fmt.Errorf("%sExpected %s got %s", formatLocation(node), def.String.Constant, val) + return ValidationError{ + Location: toLocation(node), + Message: fmt.Sprintf("expected %s got %s", def.String.Constant, val), + } } - if def.String.IsExpression { - parser := actionlint.NewExprParser() - lexer := actionlint.NewExprLexer(val + "}}") - exprNode, parseErr := parser.Parse(lexer) + if def.String.IsExpression && !s.RestrictEval { + exprNode, parseErr := exprparser.Parse(node.Value) if parseErr != nil { - return fmt.Errorf("%sFailed to parse: %s", formatLocation(node), 
parseErr.Message) + return ValidationError{ + Location: toLocation(node), + Message: fmt.Sprintf("failed to parse: %s", parseErr.Error()), + } } cerr := s.checkSingleExpression(exprNode) if cerr != nil { - return fmt.Errorf("%s%w", formatLocation(node), cerr) + return ValidationError{ + Location: toLocation(node), + Message: cerr.Error(), + } } } return nil } func (s *Node) checkOneOf(def Definition, node *yaml.Node) error { - var allErrors error + var invalidProps = math.MaxInt + var bestMatches ValidationErrorCollection for _, v := range *def.OneOf { - sub := &Node{ - Definition: v, - Schema: s.Schema, - Context: append(append([]string{}, s.Context...), s.Schema.GetDefinition(v).Context...), - } - + // Use helper to create child node + sub := s.childNode(v) err := sub.UnmarshalYAML(node) if err == nil { return nil } - allErrors = errors.Join(allErrors, fmt.Errorf("%sFailed to match %s: %w", formatLocation(node), v, err)) + if col := AsValidationErrorCollection(err); col != nil { + var matched int + for _, e := range col.Errors { + if e.Kind == ValidationKindInvalidProperty { + matched++ + } + if e.Kind == ValidationKindMismatched { + if math.MaxInt == invalidProps { + bestMatches.Collections = append(bestMatches.Collections, *col) + continue + } + } + } + if matched == 0 { + matched = math.MaxInt + } + if matched <= invalidProps { + if matched < invalidProps { + // clear, we have better matching ones + bestMatches.Collections = nil + } + bestMatches.Collections = append(bestMatches.Collections, *col) + invalidProps = matched + } + continue + } + bestMatches.Errors = append(bestMatches.Errors, ValidationError{ + Location: toLocation(node), + Message: fmt.Sprintf("failed to match %s: %s", v, err.Error()), + }) } - return allErrors + if len(bestMatches.Errors) > 0 || len(bestMatches.Collections) > 0 { + return bestMatches + } + return nil } func getStringKind(k yaml.Kind) string { @@ -344,65 +493,216 @@ func getStringKind(k yaml.Kind) string { } func (s *Node) 
checkSequence(node *yaml.Node, def Definition) error { - if node.Kind != yaml.SequenceNode { - return fmt.Errorf("%sExpected a sequence got %v", formatLocation(node), getStringKind(node.Kind)) + if err := assertKind(node, yaml.SequenceNode); err != nil { + return err } var allErrors error for _, v := range node.Content { - allErrors = errors.Join(allErrors, (&Node{ - Definition: def.Sequence.ItemType, - Schema: s.Schema, - Context: append(append([]string{}, s.Context...), s.Schema.GetDefinition(def.Sequence.ItemType).Context...), - }).UnmarshalYAML(v)) + // Use helper to create child node + child := s.childNode(def.Sequence.ItemType) + allErrors = errors.Join(allErrors, child.UnmarshalYAML(v)) } return allErrors } -func formatLocation(node *yaml.Node) string { - return fmt.Sprintf("Line: %v Column %v: ", node.Line, node.Column) +func toLocation(node *yaml.Node) Location { + return Location{Line: node.Line, Column: node.Column} +} + +func assertKind(node *yaml.Node, kind yaml.Kind) error { + if node.Kind != kind { + return ValidationError{ + Location: toLocation(node), + Kind: ValidationKindMismatched, + Message: fmt.Sprintf("expected a %s got %s", getStringKind(kind), getStringKind(node.Kind)), + } + } + return nil +} + +func (s *Node) GetNestedNode(path ...string) *Node { + if len(path) == 0 { + return s + } + def := s.Schema.GetDefinition(s.Definition) + if def.Mapping != nil { + prop, ok := def.Mapping.Properties[path[0]] + if !ok { + if def.Mapping.LooseValueType == "" { + return nil + } + return s.childNode(def.Mapping.LooseValueType).GetNestedNode(path[1:]...) + } + return s.childNode(prop.Type).GetNestedNode(path[1:]...) + } + if def.Sequence != nil { + // OneOf Branching + if path[0] != "*" { + return nil + } + return s.childNode(def.Sequence.ItemType).GetNestedNode(path[1:]...) + } + if def.OneOf != nil { + for _, one := range *def.OneOf { + opt := s.childNode(one).GetNestedNode(path...) 
+ if opt != nil { + return opt + } + } + return nil + } + return nil } func (s *Node) checkMapping(node *yaml.Node, def Definition) error { - if node.Kind != yaml.MappingNode { - return fmt.Errorf("%sExpected a mapping got %v", formatLocation(node), getStringKind(node.Kind)) + if err := assertKind(node, yaml.MappingNode); err != nil { + return err } insertDirective := regexp.MustCompile(`\${{\s*insert\s*}}`) - var allErrors error + var allErrors ValidationErrorCollection + var hasKeyExpr bool + usedProperties := map[string]string{} for i, k := range node.Content { if i%2 == 0 { if insertDirective.MatchString(k.Value) { if len(s.Context) == 0 { - allErrors = errors.Join(allErrors, fmt.Errorf("%sinsert is not allowed here", formatLocation(k))) + allErrors.AddError(ValidationError{ + Location: toLocation(node), + Message: "insert is not allowed here", + }) } + hasKeyExpr = true continue } isExpr, err := s.checkExpression(k) if err != nil { - allErrors = errors.Join(allErrors, err) + allErrors.AddError(ValidationError{ + Location: toLocation(node), + Message: err.Error(), + }) + hasKeyExpr = true continue } if isExpr { + hasKeyExpr = true continue } + if org, ok := usedProperties[strings.ToLower(k.Value)]; !ok { + // duplicate check case insensitive + usedProperties[strings.ToLower(k.Value)] = k.Value + // schema check case sensitive + usedProperties[k.Value] = k.Value + } else { + allErrors.AddError(ValidationError{ + // Kind: ValidationKindInvalidProperty, + Location: toLocation(node), + Message: fmt.Sprintf("duplicate property %v of %v", k.Value, org), + }) + } vdef, ok := def.Mapping.Properties[k.Value] if !ok { if def.Mapping.LooseValueType == "" { - allErrors = errors.Join(allErrors, fmt.Errorf("%sUnknown Property %v", formatLocation(k), k.Value)) + allErrors.AddError(ValidationError{ + Kind: ValidationKindInvalidProperty, + Location: toLocation(node), + Message: fmt.Sprintf("unknown property %v", k.Value), + }) continue } vdef = MappingProperty{Type: 
def.Mapping.LooseValueType} } - if err := (&Node{ - Definition: vdef.Type, - Schema: s.Schema, - Context: append(append([]string{}, s.Context...), s.Schema.GetDefinition(vdef.Type).Context...), - }).UnmarshalYAML(node.Content[i+1]); err != nil { - allErrors = errors.Join(allErrors, err) + // Use helper to create child node + child := s.childNode(vdef.Type) + if err := child.UnmarshalYAML(node.Content[i+1]); err != nil { + if col := AsValidationErrorCollection(err); col != nil { + allErrors.AddError(ValidationError{ + Location: toLocation(node.Content[i+1]), + Message: fmt.Sprintf("error found in value of key %s", k.Value), + }) + allErrors.Collections = append(allErrors.Collections, *col) + continue + } + allErrors.AddError(ValidationError{ + Location: toLocation(node), + Message: err.Error(), + }) continue } } } + if !hasKeyExpr { + for k, v := range def.Mapping.Properties { + if _, ok := usedProperties[k]; !ok && v.Required { + allErrors.AddError(ValidationError{ + Location: toLocation(node), + Kind: ValidationKindMissingProperty, + Message: fmt.Sprintf("missing property %s", k), + }) + } + } + } + if len(allErrors.Errors) == 0 && len(allErrors.Collections) == 0 { + return nil + } return allErrors } + +func (s *Node) childNode(defName string) *Node { + return &Node{ + RestrictEval: s.RestrictEval, + Definition: defName, + Schema: s.Schema, + Context: append(append([]string{}, s.Context...), s.Schema.GetDefinition(defName).Context...), + } +} + +func (s *Node) GetVariables() []string { + // Return only variable names (exclude function signatures) + vars := []string{} + for _, v := range s.Context { + if !strings.Contains(v, "(") { + vars = append(vars, v) + } + } + return vars +} + +// ValidateExpression checks whether all variables and functions used in the expressions +// inside the provided yaml.Node are present in the allowed sets. It returns false +// if any variable or function is missing. 
+func (s *Node) ValidateExpression(node *yaml.Node, allowedVars map[string]struct{}, allowedFuncs map[string]struct{}) bool { + val := node.Value + for { + i := strings.Index(val, "${{") + if i == -1 { + break + } + val = val[i+3:] + j := exprEnd(val) + exprNode, parseErr := exprparser.Parse(val[:j]) + if parseErr != nil { + return false + } + val = val[j+2:] + // walk expression tree + exprparser.VisitNode(exprNode, func(n exprparser.Node) { + switch el := n.(type) { + case *exprparser.FunctionNode: + if _, ok := allowedFuncs[el.Name]; !ok { + // missing function + // use a panic to break out + panic("missing function") + } + case *exprparser.ValueNode: + if el.Kind == exprparser.TokenKindNamedValue { + if _, ok := allowedVars[el.Value.(string)]; !ok { + panic("missing variable") + } + } + } + }) + } + return true +} diff --git a/pkg/schema/schema_test.go b/pkg/schema/schema_test.go index 7e3a100d..856da5ac 100644 --- a/pkg/schema/schema_test.go +++ b/pkg/schema/schema_test.go @@ -91,22 +91,53 @@ jobs: assert.NoError(t, err) } -func TestYAMLAnchors(t *testing.T) { +func TestFailure(t *testing.T) { var node yaml.Node err := yaml.Unmarshal([]byte(` on: push jobs: job-with-condition: - runs-on: &label - self-hosted - if: success() || success('joba', 'jobb') || failure() || failure('joba', 'jobb') || always() || cancelled() - steps: &steps - - run: exit 0 - then: - runs-on: *label + runs-on: self-hosted + x: failure +`), &node) + if !assert.NoError(t, err) { + return + } + err = (&Node{ + Definition: "workflow-root-strict", + Schema: GetWorkflowSchema(), + }).UnmarshalYAML(&node) + assert.Error(t, err) +} + +func TestFailure2(t *testing.T) { + var node yaml.Node + err := yaml.Unmarshal([]byte(` +on: push +jobs: + job-with-condition: + runs-on: self-hosted + Runs-on: failure +`), &node) + if !assert.NoError(t, err) { + return + } + err = (&Node{ + Definition: "workflow-root-strict", + Schema: GetWorkflowSchema(), + }).UnmarshalYAML(&node) + assert.Error(t, err) +} + 
+func TestEscape(t *testing.T) { + var node yaml.Node + err := yaml.Unmarshal([]byte(` +${{ 'on' }}: push +jobs: + job-with-condition: + runs-on: self-hosted steps: - run: exit 0 - `), &node) if !assert.NoError(t, err) { return