forgejo-runner-act/pkg/model/workflow.go
Markus Wolf 679cac1677
Fix composite input handling (#1345)
* test: add test case for #1319

* fix: setup of composite inputs

This change fixes how inputs are set up for composite actions.

All inputs are now taken from the env. The env is composed from
the 'level above'.
For example:
- step env -> taken from run context
- action env -> taken from step env
- composite env -> taken from action env

Before this change, the env setup for steps, actions, and composite
run contexts was harder to understand, as each part looked into
one of these: the parent run context, the step, the action, or the
composite run context.

Now the 'data flow' goes from higher levels to lower levels, which
should make it cleaner.

Fixes #1319

* test: add simple remote composite action test

Since we didn't have any remote composite action test at all before
this, we need at least the simplest case. It does not check every
feature, but it ensures basic availability of remote composite
actions.

* refactor: move ActionRef and ActionRepository

Moving ActionRef and ActionRepository from the RunContext into the
step allows us to remove the more or less ugly copy operations from
the RunContext.

This is cleaner, as each step already holds the required data and
the RunContext shouldn't need to know about the action details.

* refactor: remove unused properties
2022-10-06 21:58:16 +00:00

package model
import (
"fmt"
"io"
"reflect"
"regexp"
"strconv"
"strings"
"github.com/nektos/act/pkg/common"
log "github.com/sirupsen/logrus"
"gopkg.in/yaml.v3"
)
// Workflow is the structure of the files in .github/workflows
type Workflow struct {
File string
Name string `yaml:"name"`
RawOn yaml.Node `yaml:"on"`
Env map[string]string `yaml:"env"`
Jobs map[string]*Job `yaml:"jobs"`
Defaults Defaults `yaml:"defaults"`
}
// On events for the workflow
func (w *Workflow) On() []string {
switch w.RawOn.Kind {
case yaml.ScalarNode:
var val string
err := w.RawOn.Decode(&val)
if err != nil {
log.Fatal(err)
}
return []string{val}
case yaml.SequenceNode:
var val []string
err := w.RawOn.Decode(&val)
if err != nil {
log.Fatal(err)
}
return val
case yaml.MappingNode:
var val map[string]interface{}
err := w.RawOn.Decode(&val)
if err != nil {
log.Fatal(err)
}
var keys []string
for k := range val {
keys = append(keys, k)
}
return keys
}
return nil
}
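// Illustrative note, not part of the original file: On() normalizes the three
// YAML shapes of the `on:` trigger into a plain list of event names. A minimal
// sketch of the expected results (the YAML fragments are assumptions for
// demonstration only):
//
//	on: push                  -> []string{"push"}
//	on: [push, pull_request]  -> []string{"push", "pull_request"}
//	on:
//	  push:
//	    branches: [main]      -> []string{"push"} (only the mapping keys)
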
// Job is the structure of one job in a workflow
type Job struct {
Name string `yaml:"name"`
RawNeeds yaml.Node `yaml:"needs"`
RawRunsOn yaml.Node `yaml:"runs-on"`
Env yaml.Node `yaml:"env"`
If yaml.Node `yaml:"if"`
Steps []*Step `yaml:"steps"`
TimeoutMinutes string `yaml:"timeout-minutes"`
Services map[string]*ContainerSpec `yaml:"services"`
Strategy *Strategy `yaml:"strategy"`
RawContainer yaml.Node `yaml:"container"`
Defaults Defaults `yaml:"defaults"`
Outputs map[string]string `yaml:"outputs"`
Uses string `yaml:"uses"`
Result string
}
// Strategy for the job
type Strategy struct {
FailFast bool
MaxParallel int
FailFastString string `yaml:"fail-fast"`
MaxParallelString string `yaml:"max-parallel"`
RawMatrix yaml.Node `yaml:"matrix"`
}
// Default settings that will apply to all steps in the job or workflow
type Defaults struct {
Run RunDefaults `yaml:"run"`
}
// Defaults for all run steps in the job or workflow
type RunDefaults struct {
Shell string `yaml:"shell"`
WorkingDirectory string `yaml:"working-directory"`
}
// GetMaxParallel sets the default and returns the value for `max-parallel`
func (s Strategy) GetMaxParallel() int {
// The default per GitHub's docs is `GitHub will maximize the number of jobs run in parallel depending on the available runners on GitHub-hosted virtual machines`.
// So I take the liberty of hardcoding the default limit to 4, because:
// 1: tl;dr: self-hosted runners only run 1 parallel job - https://github.com/actions/runner/issues/639#issuecomment-825212735
// 2: GH has a 20 parallel job limit (for the free tier) - https://github.com/github/docs/blob/3ae84420bd10997bb5f35f629ebb7160fe776eae/content/actions/reference/usage-limits-billing-and-administration.md?plain=1#L45
// 3: I want to add MaxParallel support to act, and 20 parallel jobs is a bit overkill IMHO
maxParallel := 4
if s.MaxParallelString != "" {
var err error
if maxParallel, err = strconv.Atoi(s.MaxParallelString); err != nil {
log.Errorf("Failed to parse 'max-parallel' option: %v", err)
}
}
return maxParallel
}
// GetFailFast sets the default and returns the value for `fail-fast`
func (s Strategy) GetFailFast() bool {
// FailFast option is true by default: https://github.com/github/docs/blob/3ae84420bd10997bb5f35f629ebb7160fe776eae/content/actions/reference/workflow-syntax-for-github-actions.md?plain=1#L1107
failFast := true
log.Debug(s.FailFastString)
if s.FailFastString != "" {
var err error
if failFast, err = strconv.ParseBool(s.FailFastString); err != nil {
log.Errorf("Failed to parse 'fail-fast' option: %v", err)
}
}
return failFast
}
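// Illustrative note, not part of the original file: the raw strategy strings
// are parsed with the defaults above. Assuming a job strategy such as
//
//	strategy:
//	  fail-fast: false
//	  max-parallel: 2
//
// GetFailFast() returns false and GetMaxParallel() returns 2; if neither key
// is set, they fall back to true and 4 respectively.
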
// Container details for the job
func (j *Job) Container() *ContainerSpec {
var val *ContainerSpec
switch j.RawContainer.Kind {
case yaml.ScalarNode:
val = new(ContainerSpec)
err := j.RawContainer.Decode(&val.Image)
if err != nil {
log.Fatal(err)
}
case yaml.MappingNode:
val = new(ContainerSpec)
err := j.RawContainer.Decode(val)
if err != nil {
log.Fatal(err)
}
}
return val
}
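// Illustrative note, not part of the original file: Container() accepts both
// the short scalar form and the mapping form of `container:`. Assuming either
// of these job definitions,
//
//	container: node:16-bullseye
//
//	container:
//	  image: node:16-bullseye
//	  env:
//	    NODE_ENV: development
//
// both yield a *ContainerSpec with Image set; the mapping form also fills
// Env, Ports, Volumes, Options and Credentials when present.
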
// Needs list for Job
func (j *Job) Needs() []string {
switch j.RawNeeds.Kind {
case yaml.ScalarNode:
var val string
err := j.RawNeeds.Decode(&val)
if err != nil {
log.Fatal(err)
}
return []string{val}
case yaml.SequenceNode:
var val []string
err := j.RawNeeds.Decode(&val)
if err != nil {
log.Fatal(err)
}
return val
}
return nil
}
// RunsOn list for Job
func (j *Job) RunsOn() []string {
switch j.RawRunsOn.Kind {
case yaml.ScalarNode:
var val string
err := j.RawRunsOn.Decode(&val)
if err != nil {
log.Fatal(err)
}
return []string{val}
case yaml.SequenceNode:
var val []string
err := j.RawRunsOn.Decode(&val)
if err != nil {
log.Fatal(err)
}
return val
}
return nil
}
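// Illustrative note, not part of the original file: Needs() and RunsOn()
// follow the same scalar-or-sequence convention as On(), e.g.
//
//	runs-on: ubuntu-latest         -> []string{"ubuntu-latest"}
//	runs-on: [self-hosted, linux]  -> []string{"self-hosted", "linux"}
//	needs: build                   -> []string{"build"}
//	needs: [build, lint]           -> []string{"build", "lint"}
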
// environment decodes a YAML mapping node into a string map; any other node kind yields an empty map
func environment(yml yaml.Node) map[string]string {
env := make(map[string]string)
if yml.Kind == yaml.MappingNode {
if err := yml.Decode(&env); err != nil {
log.Fatal(err)
}
}
return env
}
// Environment returns a string-based key=value map for a job
func (j *Job) Environment() map[string]string {
return environment(j.Env)
}
// Matrix decodes RawMatrix YAML node
func (j *Job) Matrix() map[string][]interface{} {
if j.Strategy.RawMatrix.Kind == yaml.MappingNode {
var val map[string][]interface{}
if err := j.Strategy.RawMatrix.Decode(&val); err != nil {
log.Fatal(err)
}
return val
}
return nil
}
// GetMatrixes returns the matrix cross product
// It tolerates include entries that reference non-existing matrix keys but hard-fails on exclude entries that do
//
//nolint:gocyclo
func (j *Job) GetMatrixes() []map[string]interface{} {
matrixes := make([]map[string]interface{}, 0)
if j.Strategy != nil {
j.Strategy.FailFast = j.Strategy.GetFailFast()
j.Strategy.MaxParallel = j.Strategy.GetMaxParallel()
if m := j.Matrix(); m != nil {
includes := make([]map[string]interface{}, 0)
extraIncludes := make([]map[string]interface{}, 0)
for _, v := range m["include"] {
switch t := v.(type) {
case []interface{}:
for _, i := range t {
i := i.(map[string]interface{})
extraInclude := true
for k := range i {
if _, ok := m[k]; ok {
includes = append(includes, i)
extraInclude = false
break
}
}
if extraInclude {
extraIncludes = append(extraIncludes, i)
}
}
case interface{}:
v := v.(map[string]interface{})
extraInclude := true
for k := range v {
if _, ok := m[k]; ok {
includes = append(includes, v)
extraInclude = false
break
}
}
if extraInclude {
extraIncludes = append(extraIncludes, v)
}
}
}
delete(m, "include")
excludes := make([]map[string]interface{}, 0)
for _, e := range m["exclude"] {
e := e.(map[string]interface{})
for k := range e {
if _, ok := m[k]; ok {
excludes = append(excludes, e)
} else {
// We fail completely here because that's what GitHub does for non-existing matrix keys: fail on exclude, silently skip on include
log.Fatalf("The workflow is not valid. Matrix exclude key '%s' does not match any key within the matrix", k)
}
}
}
delete(m, "exclude")
matrixProduct := common.CartesianProduct(m)
MATRIX:
for _, matrix := range matrixProduct {
for _, exclude := range excludes {
if commonKeysMatch(matrix, exclude) {
log.Debugf("Skipping matrix '%v' due to exclude '%v'", matrix, exclude)
continue MATRIX
}
}
matrixes = append(matrixes, matrix)
}
for _, include := range includes {
matched := false
for _, matrix := range matrixes {
if commonKeysMatch2(matrix, include, m) {
matched = true
log.Debugf("Adding include values '%v' to existing entry", include)
for k, v := range include {
matrix[k] = v
}
}
}
if !matched {
extraIncludes = append(extraIncludes, include)
}
}
for _, include := range extraIncludes {
log.Debugf("Adding include '%v'", include)
matrixes = append(matrixes, include)
}
if len(matrixes) == 0 {
matrixes = append(matrixes, make(map[string]interface{}))
}
} else {
matrixes = append(matrixes, make(map[string]interface{}))
}
} else {
matrixes = append(matrixes, make(map[string]interface{}))
}
return matrixes
}
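// Illustrative walk-through, not part of the original file, of what
// GetMatrixes produces for an assumed strategy such as
//
//	strategy:
//	  matrix:
//	    os: [ubuntu-22.04, macos-12]
//	    node: [14, 16]
//	    exclude:
//	      - os: macos-12
//	        node: 14
//	    include:
//	      - os: ubuntu-22.04
//	        node: 16
//	        experimental: true
//
// The cartesian product yields four combinations, the exclude removes
// {macos-12, 14}, and the include (whose os/node keys exist in the matrix)
// is merged into the matching {ubuntu-22.04, 16} entry, adding
// experimental: true. Include entries that match no existing combination,
// or that use only keys unknown to the matrix, are appended as standalone
// entries instead.
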
// commonKeysMatch reports whether a and b agree on every key they have in common
func commonKeysMatch(a map[string]interface{}, b map[string]interface{}) bool {
for aKey, aVal := range a {
if bVal, ok := b[aKey]; ok && !reflect.DeepEqual(aVal, bVal) {
return false
}
}
return true
}
// commonKeysMatch2 is like commonKeysMatch, but only considers keys that are part of the original matrix m
func commonKeysMatch2(a map[string]interface{}, b map[string]interface{}, m map[string][]interface{}) bool {
for aKey, aVal := range a {
_, useKey := m[aKey]
if bVal, ok := b[aKey]; useKey && ok && !reflect.DeepEqual(aVal, bVal) {
return false
}
}
return true
}
// ContainerSpec is the specification of the container to use for the job
type ContainerSpec struct {
Image string `yaml:"image"`
Env map[string]string `yaml:"env"`
Ports []string `yaml:"ports"`
Volumes []string `yaml:"volumes"`
Options string `yaml:"options"`
Credentials map[string]string `yaml:"credentials"`
Entrypoint string
Args string
Name string
Reuse bool
}
// Step is the structure of one step in a job
type Step struct {
ID string `yaml:"id"`
If yaml.Node `yaml:"if"`
Name string `yaml:"name"`
Uses string `yaml:"uses"`
Run string `yaml:"run"`
WorkingDirectory string `yaml:"working-directory"`
Shell string `yaml:"shell"`
Env yaml.Node `yaml:"env"`
With map[string]string `yaml:"with"`
RawContinueOnError string `yaml:"continue-on-error"`
TimeoutMinutes string `yaml:"timeout-minutes"`
}
// String gets the name of step
func (s *Step) String() string {
if s.Name != "" {
return s.Name
} else if s.Uses != "" {
return s.Uses
} else if s.Run != "" {
return s.Run
}
return s.ID
}
// Environment returns a string-based key=value map for a step
// Note: all keys are uppercased
func (s *Step) Environment() map[string]string {
env := environment(s.Env)
for k, v := range env {
delete(env, k)
env[strings.ToUpper(k)] = v
}
return env
}
// GetEnv gets the env for a step
func (s *Step) GetEnv() map[string]string {
env := s.Environment()
for k, v := range s.With {
envKey := regexp.MustCompile("[^A-Z0-9-]").ReplaceAllString(strings.ToUpper(k), "_")
envKey = fmt.Sprintf("INPUT_%s", strings.ToUpper(envKey))
env[envKey] = v
}
return env
}
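// Illustrative note, not part of the original file: GetEnv() merges the step
// env (keys uppercased by Environment above) with the `with:` inputs, which
// are exposed as INPUT_* variables. Assuming a step such as
//
//	with:
//	  node-version: '16'
//	  check latest: 'true'
//
// the resulting env contains INPUT_NODE-VERSION=16 and INPUT_CHECK_LATEST=true:
// the key is uppercased and every character outside [A-Z0-9-] is replaced with
// an underscore before the INPUT_ prefix is added.
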
// ShellCommand returns the command for the shell
func (s *Step) ShellCommand() string {
shellCommand := ""
// Reference: https://github.com/actions/runner/blob/8109c962f09d9acc473d92c595ff43afceddb347/src/Runner.Worker/Handlers/ScriptHandlerHelpers.cs#L9-L17
switch s.Shell {
case "", "bash":
shellCommand = "bash --noprofile --norc -e -o pipefail {0}"
case "pwsh":
shellCommand = "pwsh -command . '{0}'"
case "python":
shellCommand = "python {0}"
case "sh":
shellCommand = "sh -e -c {0}"
case "cmd":
shellCommand = "%ComSpec% /D /E:ON /V:OFF /S /C \"CALL \"{0}\"\""
case "powershell":
shellCommand = "powershell -command . '{0}'"
default:
shellCommand = s.Shell
}
return shellCommand
}
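// Illustrative note, not part of the original file: the `{0}` placeholder in
// the templates above is meant to be replaced by the caller with the path of
// the generated step script (following the actions/runner convention linked
// above). For example, a step with `shell: pwsh` would run roughly as
//
//	pwsh -command . '<path to script>'
//
// and any unrecognized shell value is passed through verbatim as a custom
// command template.
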
// StepType describes what type of step we are about to run
type StepType int
const (
// StepTypeRun is all steps that have a `run` attribute
StepTypeRun StepType = iota
// StepTypeUsesDockerURL is all steps that have a `uses` that is of the form `docker://...`
StepTypeUsesDockerURL
// StepTypeUsesActionLocal is all steps that have a `uses` that is a local action in a subdirectory
StepTypeUsesActionLocal
// StepTypeUsesActionRemote is all steps that have a `uses` that is a reference to a github repo
StepTypeUsesActionRemote
// StepTypeInvalid is for steps that have an invalid step definition
StepTypeInvalid
)
func (s StepType) String() string {
switch s {
case StepTypeInvalid:
return "invalid"
case StepTypeRun:
return "run"
case StepTypeUsesActionLocal:
return "local-action"
case StepTypeUsesActionRemote:
return "remote-action"
case StepTypeUsesDockerURL:
return "docker"
}
return "unknown"
}
// Type returns the type of the step
func (s *Step) Type() StepType {
if s.Run == "" && s.Uses == "" {
return StepTypeInvalid
}
if s.Run != "" {
if s.Uses != "" {
return StepTypeInvalid
}
return StepTypeRun
} else if strings.HasPrefix(s.Uses, "docker://") {
return StepTypeUsesDockerURL
} else if strings.HasPrefix(s.Uses, "./") {
return StepTypeUsesActionLocal
}
return StepTypeUsesActionRemote
}
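// Illustrative note, not part of the original file: Type() classifies typical
// step definitions as follows (the values are assumptions for demonstration):
//
//	run: make test                -> StepTypeRun
//	uses: docker://alpine:3.18    -> StepTypeUsesDockerURL
//	uses: ./.github/actions/setup -> StepTypeUsesActionLocal
//	uses: actions/checkout@v3     -> StepTypeUsesActionRemote
//	both run and uses, or neither -> StepTypeInvalid
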
// ReadWorkflow decodes a Workflow from the given workflow file reader
func ReadWorkflow(in io.Reader) (*Workflow, error) {
w := new(Workflow)
err := yaml.NewDecoder(in).Decode(w)
return w, err
}
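// Illustrative usage sketch, not part of the original file; the YAML literal
// and the commented results are assumptions for demonstration only:
//
//	wf, err := ReadWorkflow(strings.NewReader(`
//	name: ci
//	on: push
//	jobs:
//	  build:
//	    runs-on: ubuntu-latest
//	    steps:
//	      - run: echo hello
//	`))
//	if err != nil {
//		// handle the parse error
//	}
//	wf.On()                      // ["push"]
//	wf.GetJobIDs()               // ["build"]
//	wf.GetJob("build").RunsOn()  // ["ubuntu-latest"]
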
// GetJob will get a job by its ID in the workflow
func (w *Workflow) GetJob(jobID string) *Job {
for id, j := range w.Jobs {
if jobID == id {
if j.Name == "" {
j.Name = id
}
if j.If.Value == "" {
j.If.Value = "success()"
}
return j
}
}
return nil
}
// GetJobIDs will get all the job IDs in the workflow
func (w *Workflow) GetJobIDs() []string {
ids := make([]string, 0)
for id := range w.Jobs {
ids = append(ids, id)
}
return ids
}