From fc51b3a6beff50a223fc5f9bdcc85fb7fc841c85 Mon Sep 17 00:00:00 2001 From: Dragos Dumitrache Date: Mon, 30 Jan 2023 15:17:25 +0000 Subject: [PATCH 1/7] Update atlantis from upstream (#64) * Support using repo configuration from a branch Instead of using the atlantis.yaml file present in the pull request branch, allow users to specify a branch that contains an approved version of atlantis.yaml. This allows enabling things like approval requirement overrides for repo config, while ensuring a user can't simply change the configuration to drop an approval requirement in the branch they're currently working on. An example would be creating a server-side configuration like this: ```yaml --- repos: - id: github.com/gocardless/app apply_requirements: [approved, mergeable] allowed_overrides: [apply_requirements, workflow] allow_custom_workflows: false config_source_branch: master workflows: lab: plan: steps: - init - plan: extra_args: [-var-file, lab.tfvars] prd: plan: steps: - init - plan: extra_args: [-var-file, prd.tfvars] ``` This specifies rigid workflows that become all that is available to run in gocardless/app. Now the application (gocardless/app) would create an atlantis.yaml at the root of the repo, like so: ``` --- version: 2 projects: - dir: terraform/google/projects/apps apply_requirements: [] workspace: lab workflow: lab - dir: terraform/google/projects/apps apply_requirements: [approved] workspace: prd workflow: prd ``` Because config_source_branch is set to master, when someone creates a PR against gocardless/app, the atlantis.yaml that specifies whether a project has an approved apply_requirement is from the master branch. 
As is a common pattern, merging to master is protected by an authorised code-review, allowing us to have two types of environment for this project: - lab, where you can plan and apply without approval - prd, where you can only apply once the PR is approved * Bump go to v1.14 in docker * Fixup int to string Resolves this issue after upgrade to go 1.14: server/events/db/boltdb.go:150:87: conversion from int to string yields a string of one rune, not a string of digits (did you mean fmt.Sprint(x)?) Update hook calls to methods whose signature has changed Disabled Dockerfile.dev, currently broken on upstream * Bump up available patch versions of terraform This should mimic the changes on the GC master branch --------- Co-authored-by: Lawrence Jones Co-authored-by: Dyson Simmons --- Dockerfile | 2 +- Dockerfile.dev => _Dockerfile.dev | 0 docker-compose.yml | 4 +- server/core/config/raw/global_cfg.go | 2 + server/core/config/valid/global_cfg.go | 23 +++-- server/core/db/boltdb.go | 2 +- server/events/github_app_working_dir.go | 4 +- server/events/github_app_working_dir_test.go | 2 +- server/events/matchers/slice_of_string.go | 20 ++++ server/events/mock_workingdir_test.go | 20 ++-- server/events/mocks/mock_working_dir.go | 20 ++-- .../post_workflow_hooks_command_runner.go | 2 +- .../pre_workflow_hooks_command_runner.go | 2 +- server/events/project_command_builder.go | 39 +++++++- .../project_command_builder_internal_test.go | 2 +- server/events/project_command_builder_test.go | 16 +-- server/events/project_command_runner.go | 6 +- server/events/project_command_runner_test.go | 2 + server/events/working_dir.go | 99 +++++++++++++------ server/events/working_dir_test.go | 55 ++++++----- 20 files changed, 219 insertions(+), 103 deletions(-) rename Dockerfile.dev => _Dockerfile.dev (100%) create mode 100644 server/events/matchers/slice_of_string.go diff --git a/Dockerfile b/Dockerfile index 5129dd9a3e..9415dd3519 100644 --- a/Dockerfile +++ b/Dockerfile @@ -33,7 +33,7 @@ ENV 
DEFAULT_TERRAFORM_VERSION=1.3.7 # In the official Atlantis image we only have the latest of each Terraform version. SHELL ["/bin/bash", "-o", "pipefail", "-c"] -RUN AVAILABLE_TERRAFORM_VERSIONS="1.0.11 1.1.9 1.2.9 ${DEFAULT_TERRAFORM_VERSION}" && \ +RUN AVAILABLE_TERRAFORM_VERSIONS="0.8.8 0.9.11 0.10.8 0.11.14 0.11.15 0.12.29 0.12.31 0.13.2 0.13.7 1.0.11 1.1.9 1.2.9 ${DEFAULT_TERRAFORM_VERSION}" && \ case "${TARGETPLATFORM}" in \ "linux/amd64") TERRAFORM_ARCH=amd64 ;; \ "linux/arm64") TERRAFORM_ARCH=arm64 ;; \ diff --git a/Dockerfile.dev b/_Dockerfile.dev similarity index 100% rename from Dockerfile.dev rename to _Dockerfile.dev diff --git a/docker-compose.yml b/docker-compose.yml index 9266996801..e4b756d4ee 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -25,7 +25,7 @@ services: - redis build: context: . - dockerfile: Dockerfile.dev + dockerfile: _Dockerfile.dev ports: - 4141:4141 volumes: @@ -37,4 +37,4 @@ services: volumes: redis: - driver: local \ No newline at end of file + driver: local diff --git a/server/core/config/raw/global_cfg.go b/server/core/config/raw/global_cfg.go index d09361fea1..e29430aa6f 100644 --- a/server/core/config/raw/global_cfg.go +++ b/server/core/config/raw/global_cfg.go @@ -34,6 +34,7 @@ type Repo struct { AllowCustomWorkflows *bool `yaml:"allow_custom_workflows,omitempty" json:"allow_custom_workflows,omitempty"` DeleteSourceBranchOnMerge *bool `yaml:"delete_source_branch_on_merge,omitempty" json:"delete_source_branch_on_merge,omitempty"` RepoLocking *bool `yaml:"repo_locking,omitempty" json:"repo_locking,omitempty"` + ConfigSourceBranch *string `yaml:"config_source_branch,omitempty" json:"config_source_branch,omitempty"` } func (g GlobalCfg) Validate() error { @@ -314,5 +315,6 @@ OuterGlobalImportReqs: AllowCustomWorkflows: r.AllowCustomWorkflows, DeleteSourceBranchOnMerge: r.DeleteSourceBranchOnMerge, RepoLocking: r.RepoLocking, + ConfigSourceBranch: r.ConfigSourceBranch, } } diff --git 
a/server/core/config/valid/global_cfg.go b/server/core/config/valid/global_cfg.go index 18559e8ecb..2f9d624085 100644 --- a/server/core/config/valid/global_cfg.go +++ b/server/core/config/valid/global_cfg.go @@ -26,6 +26,7 @@ const AllowCustomWorkflowsKey = "allow_custom_workflows" const DefaultWorkflowName = "default" const DeleteSourceBranchOnMergeKey = "delete_source_branch_on_merge" const RepoLockingKey = "repo_locking" +const ConfigSourceBranchKey = "config_source_branch" // DefaultAtlantisFile is the default name of the config file for each repo. const DefaultAtlantisFile = "atlantis.yaml" @@ -80,6 +81,9 @@ type Repo struct { AllowCustomWorkflows *bool DeleteSourceBranchOnMerge *bool RepoLocking *bool + // ConfigSourceBranch specifies the branch that we'll use to checkout the atlantis repo + // configuration + ConfigSourceBranch *string } type MergedProjectCfg struct { @@ -99,6 +103,7 @@ type MergedProjectCfg struct { DeleteSourceBranchOnMerge bool ExecutionOrderGroup int RepoLocking bool + ConfigSourceBranch *string } // WorkflowHook is a map of custom run commands to run before or after workflows. @@ -291,7 +296,7 @@ func (r Repo) IDString() string { // final config. It assumes that all configs have been validated. func (g GlobalCfg) MergeProjectCfg(log logging.SimpleLogging, repoID string, proj Project, rCfg RepoCfg) MergedProjectCfg { log.Debug("MergeProjectCfg started") - planReqs, applyReqs, importReqs, workflow, allowedOverrides, allowCustomWorkflows, deleteSourceBranchOnMerge, repoLocking := g.getMatchingCfg(log, repoID) + planReqs, applyReqs, importReqs, workflow, allowedOverrides, allowCustomWorkflows, deleteSourceBranchOnMerge, repoLocking, _ := g.GetMatchingCfg(log, repoID) // If repos are allowed to override certain keys then override them. for _, key := range allowedOverrides { @@ -381,7 +386,7 @@ func (g GlobalCfg) MergeProjectCfg(log logging.SimpleLogging, repoID string, pro // repo with id repoID. It is used when there is no repo config. 
func (g GlobalCfg) DefaultProjCfg(log logging.SimpleLogging, repoID string, repoRelDir string, workspace string) MergedProjectCfg { log.Debug("building config based on server-side config") - planReqs, applyReqs, importReqs, workflow, _, _, deleteSourceBranchOnMerge, repoLocking := g.getMatchingCfg(log, repoID) + planReqs, applyReqs, importReqs, workflow, _, _, deleteSourceBranchOnMerge, repoLocking, configSourceBranch := g.GetMatchingCfg(log, repoID) return MergedProjectCfg{ PlanRequirements: planReqs, ApplyRequirements: applyReqs, @@ -395,6 +400,7 @@ func (g GlobalCfg) DefaultProjCfg(log logging.SimpleLogging, repoID string, repo PolicySets: g.PolicySets, DeleteSourceBranchOnMerge: deleteSourceBranchOnMerge, RepoLocking: repoLocking, + ConfigSourceBranch: configSourceBranch, } } @@ -496,8 +502,8 @@ func (g GlobalCfg) ValidateRepoCfg(rCfg RepoCfg, repoID string) error { return nil } -// getMatchingCfg returns the key settings for repoID. -func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (planReqs []string, applyReqs []string, importReqs []string, workflow Workflow, allowedOverrides []string, allowCustomWorkflows bool, deleteSourceBranchOnMerge bool, repoLocking bool) { +// GetMatchingCfg returns the key settings for repoID. 
+func (g GlobalCfg) GetMatchingCfg(log logging.SimpleLogging, repoID string) (planReqs []string, applyReqs []string, importReqs []string, workflow Workflow, allowedOverrides []string, allowCustomWorkflows bool, deleteSourceBranchOnMerge bool, repoLocking bool, configSourceBranch *string) { toLog := make(map[string]string) traceF := func(repoIdx int, repoID string, key string, val interface{}) string { from := "default server config" @@ -519,7 +525,7 @@ func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (pla return fmt.Sprintf("setting %s: %s from %s", key, valStr, from) } - for _, key := range []string{PlanRequirementsKey, ApplyRequirementsKey, ImportRequirementsKey, WorkflowKey, AllowedOverridesKey, AllowCustomWorkflowsKey, DeleteSourceBranchOnMergeKey, RepoLockingKey} { + for _, key := range []string{PlanRequirementsKey, ApplyRequirementsKey, ImportRequirementsKey, WorkflowKey, AllowedOverridesKey, AllowCustomWorkflowsKey, DeleteSourceBranchOnMergeKey, RepoLockingKey, ConfigSourceBranchKey} { for i, repo := range g.Repos { if repo.IDMatches(repoID) { switch key { @@ -563,6 +569,11 @@ func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (pla toLog[RepoLockingKey] = traceF(i, repo.IDString(), RepoLockingKey, *repo.RepoLocking) repoLocking = *repo.RepoLocking } + case ConfigSourceBranchKey: + if repo.ConfigSourceBranch != nil { + toLog[ConfigSourceBranchKey] = traceF(i, repo.IDString(), ConfigSourceBranchKey, *repo.ConfigSourceBranch) + configSourceBranch = repo.ConfigSourceBranch + } } } } @@ -574,7 +585,7 @@ func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (pla } // MatchingRepo returns an instance of Repo which matches a given repoID. -// If multiple repos match, return the last one for consistency with getMatchingCfg. +// If multiple repos match, return the last one for consistency with GetMatchingCfg. 
func (g GlobalCfg) MatchingRepo(repoID string) *Repo { for i := len(g.Repos) - 1; i >= 0; i-- { repo := g.Repos[i] diff --git a/server/core/db/boltdb.go b/server/core/db/boltdb.go index 14641bf0af..685c672679 100644 --- a/server/core/db/boltdb.go +++ b/server/core/db/boltdb.go @@ -163,7 +163,7 @@ func (b *BoltDB) List() ([]models.ProjectLock, error) { for k, v := range locksBytes { var lock models.ProjectLock if err := json.Unmarshal(v, &lock); err != nil { - return locks, errors.Wrap(err, fmt.Sprintf("failed to deserialize lock at key '%d'", k)) + return locks, errors.Wrap(err, fmt.Sprintf("failed to deserialize lock at key %q", fmt.Sprint(k))) } locks = append(locks, lock) } diff --git a/server/events/github_app_working_dir.go b/server/events/github_app_working_dir.go index 05e2055e33..33900a3510 100644 --- a/server/events/github_app_working_dir.go +++ b/server/events/github_app_working_dir.go @@ -21,7 +21,7 @@ type GithubAppWorkingDir struct { } // Clone writes a fresh token for Github App authentication -func (g *GithubAppWorkingDir) Clone(log logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (g *GithubAppWorkingDir) Clone(log logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string, additionalBranches []string) (string, bool, error) { log.Info("Refreshing git tokens for Github App") @@ -51,5 +51,5 @@ func (g *GithubAppWorkingDir) Clone(log logging.SimpleLogging, headRepo models.R headRepo.CloneURL = strings.Replace(headRepo.CloneURL, "://:", authURL, 1) headRepo.SanitizedCloneURL = strings.Replace(baseRepo.SanitizedCloneURL, "://:", "://x-access-token:", 1) - return g.WorkingDir.Clone(log, headRepo, p, workspace) + return g.WorkingDir.Clone(log, headRepo, p, workspace, additionalBranches) } diff --git a/server/events/github_app_working_dir_test.go b/server/events/github_app_working_dir_test.go index 9598a23044..1478de27c2 100644 --- 
a/server/events/github_app_working_dir_test.go +++ b/server/events/github_app_working_dir_test.go @@ -48,7 +48,7 @@ func TestClone_GithubAppNoneExisting(t *testing.T) { cloneDir, _, err := gwd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", - }, "default") + }, "default", []string{}) Ok(t, err) // Use rev-parse to verify at correct commit. diff --git a/server/events/matchers/slice_of_string.go b/server/events/matchers/slice_of_string.go new file mode 100644 index 0000000000..96f9b24ae2 --- /dev/null +++ b/server/events/matchers/slice_of_string.go @@ -0,0 +1,20 @@ +// Code generated by pegomock. DO NOT EDIT. +package matchers + +import ( + "reflect" + "github.com/petergtz/pegomock" + +) + +func AnySliceOfString() []string { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]string))(nil)).Elem())) + var nullValue []string + return nullValue +} + +func EqSliceOfString(value []string) []string { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue []string + return nullValue +} diff --git a/server/events/mock_workingdir_test.go b/server/events/mock_workingdir_test.go index 0b12302382..c18703f57b 100644 --- a/server/events/mock_workingdir_test.go +++ b/server/events/mock_workingdir_test.go @@ -26,11 +26,11 @@ func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir { func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockWorkingDir) Clone(_param0 logging.SimpleLogging, _param1 models.Repo, _param2 models.PullRequest, _param3 string) (string, bool, error) { +func (mock *MockWorkingDir) Clone(_param0 logging.SimpleLogging, _param1 models.Repo, _param2 models.PullRequest, _param3 string, _additionalBranches []string) (string, bool, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{_param0, _param1, _param2, _param3} + params := []pegomock.Param{_param0, _param1, _param2, _param3, _additionalBranches} result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string var ret1 bool @@ -169,8 +169,8 @@ type VerifierMockWorkingDir struct { timeout time.Duration } -func (verifier *VerifierMockWorkingDir) Clone(_param0 logging.SimpleLogging, _param1 models.Repo, _param2 models.PullRequest, _param3 string) *MockWorkingDir_Clone_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2, _param3} +func (verifier *VerifierMockWorkingDir) Clone(_param0 logging.SimpleLogging, _param1 models.Repo, _param2 models.PullRequest, _param3 string, _additionalBranches []string) *MockWorkingDir_Clone_OngoingVerification { + params := []pegomock.Param{_param0, _param1, _param2, _param3, _additionalBranches} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clone", params, verifier.timeout) return &MockWorkingDir_Clone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -180,12 +180,12 @@ type MockWorkingDir_Clone_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { - _param0, _param1, _param2, _param3 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1] +func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string, []string) { + _param0, _param1, _param2, _param3, _additionalBranches := 
c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1], _additionalBranches[len(_additionalBranches)-1] } -func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { +func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string, _additionalBranches [][]string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) @@ -204,6 +204,10 @@ func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_p for u, param := range params[3] { _param3[u] = param.(string) } + _additionalBranches = make([][]string, len(params[4])) + for u, param := range params[4] { + _additionalBranches[u] = param.([]string) + } } return } diff --git a/server/events/mocks/mock_working_dir.go b/server/events/mocks/mock_working_dir.go index b22447042c..566510af02 100644 --- a/server/events/mocks/mock_working_dir.go +++ b/server/events/mocks/mock_working_dir.go @@ -26,11 +26,11 @@ func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir { func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockWorkingDir) Clone(_param0 logging.SimpleLogging, _param1 models.Repo, _param2 models.PullRequest, _param3 string) (string, bool, error) { +func (mock *MockWorkingDir) Clone(_param0 logging.SimpleLogging, _param1 models.Repo, _param2 models.PullRequest, _param3 string, _additionalBranches []string) (string, bool, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{_param0, _param1, _param2, _param3} + params := []pegomock.Param{_param0, _param1, _param2, _param3, _additionalBranches} result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string var ret1 bool @@ -169,8 +169,8 @@ type VerifierMockWorkingDir struct { timeout time.Duration } -func (verifier *VerifierMockWorkingDir) Clone(_param0 logging.SimpleLogging, _param1 models.Repo, _param2 models.PullRequest, _param3 string) *MockWorkingDir_Clone_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2, _param3} +func (verifier *VerifierMockWorkingDir) Clone(_param0 logging.SimpleLogging, _param1 models.Repo, _param2 models.PullRequest, _param3 string, _additionalBranches []string) *MockWorkingDir_Clone_OngoingVerification { + params := []pegomock.Param{_param0, _param1, _param2, _param3, _additionalBranches} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clone", params, verifier.timeout) return &MockWorkingDir_Clone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -180,12 +180,12 @@ type MockWorkingDir_Clone_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { - _param0, _param1, _param2, _param3 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1] +func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string, []string) { + _param0, _param1, _param2, _param3, _additionalBranches := 
c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1], _additionalBranches[len(_additionalBranches)-1] } -func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { +func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string, _param4 [][]string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) @@ -204,6 +204,10 @@ func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_p for u, param := range params[3] { _param3[u] = param.(string) } + _param4 = make([][]string, len(params[4])) + for u, param := range params[4] { + _param4[u] = param.([]string) + } } return } diff --git a/server/events/post_workflow_hooks_command_runner.go b/server/events/post_workflow_hooks_command_runner.go index 37947ca169..7227d934ce 100644 --- a/server/events/post_workflow_hooks_command_runner.go +++ b/server/events/post_workflow_hooks_command_runner.go @@ -66,7 +66,7 @@ func (w *DefaultPostWorkflowHooksCommandRunner) RunPostHooks( log.Debug("got workspace lock") defer unlockFn() - repoDir, _, err := w.WorkingDir.Clone(log, headRepo, pull, DefaultWorkspace) + repoDir, _, err := w.WorkingDir.Clone(log, headRepo, pull, DefaultWorkspace, []string{}) if err != nil { return err } diff --git a/server/events/pre_workflow_hooks_command_runner.go b/server/events/pre_workflow_hooks_command_runner.go index 4bcb8aee20..b9bfe50efa 100644 --- a/server/events/pre_workflow_hooks_command_runner.go +++ b/server/events/pre_workflow_hooks_command_runner.go @@ -64,7 +64,7 @@ func (w *DefaultPreWorkflowHooksCommandRunner) RunPreHooks(ctx 
*command.Context, log.Debug("got workspace lock") defer unlockFn() - repoDir, _, err := w.WorkingDir.Clone(log, headRepo, pull, DefaultWorkspace) + repoDir, _, err := w.WorkingDir.Clone(log, headRepo, pull, DefaultWorkspace, []string{}) if err != nil { return err } diff --git a/server/events/project_command_builder.go b/server/events/project_command_builder.go index 1069a80480..b168e8b3df 100644 --- a/server/events/project_command_builder.go +++ b/server/events/project_command_builder.go @@ -3,6 +3,7 @@ package events import ( "fmt" "os" + "os/exec" "path/filepath" "sort" "strings" @@ -326,7 +327,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex ctx.Log.Debug("got workspace lock") defer unlockFn() - repoDir, _, err := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) + repoDir, err := p.prepareWorkspace(ctx, workspace) if err != nil { return nil, err } @@ -508,7 +509,7 @@ func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *command.Cont defer unlockFn() ctx.Log.Debug("cloning repository") - _, _, err = p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) + _, err = p.prepareWorkspace(ctx, workspace) if err != nil { return pcc, err } @@ -589,6 +590,40 @@ func (p *DefaultProjectCommandBuilder) getCfg(ctx *command.Context, projectName } // buildAllProjectCommandsByPlan builds contexts for a command for every project that has +// prepareWorkspace clones new changes into our repository and checks out the appropriate +// version of atlantis.yaml. 
+func (p *DefaultProjectCommandBuilder) prepareWorkspace(ctx *command.Context, workspace string) (string, error) { + // If we have no repo that matches in our global config then that is ok, as we'll get a + // nil pointer back that will allow us to ignore the config source branch + _, _, _, _, _, _, _, _, configSourceBranch := p.GlobalCfg.GetMatchingCfg(ctx.Log, ctx.Pull.BaseRepo.ID()) + + // If we need to access another branch, ensure we fetch it during our initial clone + additionalBranches := []string{} + if configSourceBranch != nil { + additionalBranches = append(additionalBranches, *configSourceBranch) + } + + repoDir, _, err := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace, additionalBranches) + if err != nil { + return repoDir, err + } + + // If we've specified a source branch for our atlantis.yaml, checkout the file from that + // branch before continuing with validation. + if configSourceBranch != nil { + ctx.Log.Debug("checking out %s from repos config source branch %s", "atlantis.yaml", *configSourceBranch) + checkoutCmd := exec.Command("git", "checkout", fmt.Sprintf("origin/%s", *configSourceBranch), "--", "atlantis.yaml") + checkoutCmd.Dir = repoDir + output, err := checkoutCmd.CombinedOutput() + if err != nil { + return repoDir, errors.Wrapf(err, "failed to checkout %s from branch %s in %s: %s", "atlantis.yaml", *configSourceBranch, repoDir, string(output)) + } + } + + return repoDir, nil +} + +// buildApplyAllCommands builds apply contexts for every project that has // pending plans in this ctx. 
func (p *DefaultProjectCommandBuilder) buildAllProjectCommandsByPlan(ctx *command.Context, commentCmd *CommentCommand) ([]command.ProjectContext, error) { // Lock all dirs in this pull request (instead of a single dir) because we diff --git a/server/events/project_command_builder_internal_test.go b/server/events/project_command_builder_internal_test.go index 675f40de60..50b460d0f8 100644 --- a/server/events/project_command_builder_internal_test.go +++ b/server/events/project_command_builder_internal_test.go @@ -624,7 +624,7 @@ projects: }) workingDir := NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmp, false, nil) + When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString(), AnyStringSlice())).ThenReturn(tmp, false, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"modules/module/main.tf"}, nil) diff --git a/server/events/project_command_builder_test.go b/server/events/project_command_builder_test.go index 6269781404..3ad252a090 100644 --- a/server/events/project_command_builder_test.go +++ b/server/events/project_command_builder_test.go @@ -135,7 +135,7 @@ projects: }) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString(), AnyStringSlice())).ThenReturn(tmpDir, false, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"main.tf"}, nil) if c.AtlantisYAML != "" { @@ -423,7 +423,7 @@ projects: }) 
workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString(), AnyStringSlice())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"main.tf"}, nil) @@ -791,7 +791,7 @@ projects: tmpDir := DirStructure(t, c.DirStructure) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString(), AnyStringSlice())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn(c.ModifiedFiles, nil) @@ -982,7 +982,8 @@ projects: matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), - AnyString())).ThenReturn(repoDir, false, nil) + AnyString(), + AnyStringSlice())).ThenReturn(repoDir, false, nil) When(workingDir.GetWorkingDir( matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), @@ -1069,7 +1070,7 @@ func TestDefaultProjectCommandBuilder_EscapeArgs(t *testing.T) { }) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), 
matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString(), AnyStringSlice())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"main.tf"}, nil) @@ -1226,7 +1227,8 @@ projects: matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), - AnyString())).ThenReturn(tmpDir, false, nil) + AnyString(), + AnyStringSlice())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir( matchers.AnyModelsRepo(), @@ -1380,7 +1382,7 @@ parallel_plan: true`, }) Ok(t, err) Equals(t, c.ExpectedCtxs, len(actCtxs)) - workingDir.VerifyWasCalled(c.ExpectedClones).Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString()) + workingDir.VerifyWasCalled(c.ExpectedClones).Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString(), AnyStringSlice()) } } diff --git a/server/events/project_command_runner.go b/server/events/project_command_runner.go index ef1bd55f6d..eb9197f999 100644 --- a/server/events/project_command_runner.go +++ b/server/events/project_command_runner.go @@ -414,7 +414,7 @@ func (p *DefaultProjectCommandRunner) doPlan(ctx command.ProjectContext) (*model defer unlockFn() // Clone is idempotent so okay to run even if the repo was already cloned. 
- repoDir, hasDiverged, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) + repoDir, hasDiverged, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace, []string{}) if cloneErr != nil { if unlockErr := lockAttempt.UnlockFn(); unlockErr != nil { ctx.Log.Err("error unlocking state after plan error: %v", unlockErr) @@ -522,7 +522,7 @@ func (p *DefaultProjectCommandRunner) doVersion(ctx command.ProjectContext) (ver func (p *DefaultProjectCommandRunner) doImport(ctx command.ProjectContext) (out *models.ImportSuccess, failure string, err error) { // Clone is idempotent so okay to run even if the repo was already cloned. - repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) + repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace, []string{}) if cloneErr != nil { return nil, "", cloneErr } @@ -568,7 +568,7 @@ func (p *DefaultProjectCommandRunner) doImport(ctx command.ProjectContext) (out func (p *DefaultProjectCommandRunner) doStateRm(ctx command.ProjectContext) (out *models.StateRmSuccess, failure string, err error) { // Clone is idempotent so okay to run even if the repo was already cloned. 
- repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) + repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace, []string{}) if cloneErr != nil { return nil, "", cloneErr } diff --git a/server/events/project_command_runner_test.go b/server/events/project_command_runner_test.go index 7762010753..87c0f8ae94 100644 --- a/server/events/project_command_runner_test.go +++ b/server/events/project_command_runner_test.go @@ -67,6 +67,7 @@ func TestDefaultProjectCommandRunner_Plan(t *testing.T) { matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString(), + AnyStringSlice(), )).ThenReturn(repoDir, false, nil) When(mockLocker.TryLock( matchers.AnyLoggingSimpleLogging(), @@ -565,6 +566,7 @@ func TestDefaultProjectCommandRunner_RunEnvSteps(t *testing.T) { matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString(), + AnyStringSlice(), )).ThenReturn(repoDir, false, nil) When(mockLocker.TryLock( matchers.AnyLoggingSimpleLogging(), diff --git a/server/events/working_dir.go b/server/events/working_dir.go index 10bca552b0..bf258b0602 100644 --- a/server/events/working_dir.go +++ b/server/events/working_dir.go @@ -40,7 +40,7 @@ type WorkingDir interface { // absolute path to the root of the cloned repo. It also returns // a boolean indicating if we should warn users that the branch we're // merging into has been updated since we cloned it. - Clone(log logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) + Clone(log logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string, additionalBranches []string) (string, bool, error) // GetWorkingDir returns the path to the workspace for this repo and pull. // If workspace does not exist on disk, error will be of type os.IsNotExist. 
GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) (string, error) @@ -80,49 +80,84 @@ type FileWorkspace struct { // If the repo already exists and is at // the right commit it does nothing. This is to support running commands in // multiple dirs of the same repo without deleting existing plans. +// +// By default, our clone is shallow. If you wish to access resources from +// commits other than the pull's base, then provide them as additionalBranches. func (w *FileWorkspace) Clone( log logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, - workspace string) (string, bool, error) { + workspace string, + additionalBranches []string) (string, bool, error) { cloneDir := w.cloneDir(p.BaseRepo, p, workspace) - // If the directory already exists, check if it's at the right commit. - // If so, then we do nothing. - if _, err := os.Stat(cloneDir); err == nil { log.Debug("clone directory %q already exists, checking if it's at the right commit", cloneDir) - - // We use git rev-parse to see if our repo is at the right commit. - // If just checking out the pull request branch, we can use HEAD. - // If doing a merge, then HEAD won't be at the pull request's HEAD - // because we'll already have performed a merge. Instead, we'll check - // HEAD^2 since that will be the commit before our merge.
- pullHead := "HEAD" - if w.CheckoutMerge { - pullHead = "HEAD^2" - } - revParseCmd := exec.Command("git", "rev-parse", pullHead) // #nosec - revParseCmd.Dir = cloneDir - outputRevParseCmd, err := revParseCmd.CombinedOutput() - if err != nil { - log.Warn("will re-clone repo, could not determine if was at correct commit: %s: %s: %s", strings.Join(revParseCmd.Args, " "), err, string(outputRevParseCmd)) - return cloneDir, false, w.forceClone(log, cloneDir, headRepo, p) + if !w.alreadyClonedHEAD(log, cloneDir, p) { + if err := w.forceClone(log, cloneDir, headRepo, p); err != nil { + return cloneDir, false, err } - currCommit := strings.Trim(string(outputRevParseCmd), "\n") + } - // We're prefix matching here because BitBucket doesn't give us the full - // commit, only a 12 character prefix. - if strings.HasPrefix(currCommit, p.HeadCommit) { - log.Debug("repo is at correct commit %q so will not re-clone", p.HeadCommit) - return cloneDir, w.warnDiverged(log, p, headRepo, cloneDir), nil + for _, branch := range additionalBranches { + if _, err := w.fetchBranch(log, cloneDir, branch); err != nil { + return cloneDir, false, err } + } + + return cloneDir, w.warnDiverged(log, p, headRepo, cloneDir), nil +} + +// alreadyClonedHEAD determines whether the HEAD commit is already present in the cloneDir +// repository. This can be used to determine whether we should force clone again. +func (w *FileWorkspace) alreadyClonedHEAD(log logging.SimpleLogging, cloneDir string, p models.PullRequest) bool { + // If the directory isn't there or isn't readable, we cannot already be cloned + if _, err := os.Stat(cloneDir); err != nil { + return false + } + + log.Debug("clone directory %q already exists, checking if it's at the right commit", cloneDir) - log.Debug("repo was already cloned but is not at correct commit, wanted %q got %q", p.HeadCommit, currCommit) - // We'll fall through to re-clone. + // We use git rev-parse to see if our repo is at the right commit. 
+ // If just checking out the pull request branch, we can use HEAD. + // If doing a merge, then HEAD won't be at the pull request's HEAD + // because we'll already have performed a merge. Instead, we'll check + // HEAD^2 since that will be the commit before our merge. + pullHead := "HEAD" + if w.CheckoutMerge { + pullHead = "HEAD^2" + } + revParseCmd := exec.Command("git", "rev-parse", pullHead) // #nosec + revParseCmd.Dir = cloneDir + outputRevParseCmd, err := revParseCmd.CombinedOutput() + if err != nil { + log.Warn("will re-clone repo, could not determine if was at correct commit: %s: %s: %s", strings.Join(revParseCmd.Args, " "), err, string(outputRevParseCmd)) + return false + } + currCommit := strings.Trim(string(outputRevParseCmd), "\n") + // We're prefix matching here because BitBucket doesn't give us the full + // commit, only a 12 character prefix. + if strings.HasPrefix(currCommit, p.HeadCommit) { + log.Debug("repo is at correct commit %q so will not re-clone", p.HeadCommit) + return true + } + + log.Debug("repo was already cloned but is not at correct commit, wanted %q got %q", p.HeadCommit, currCommit) + return false +} + +// fetchBranch causes the repository to fetch the most recent version of the given branch +// in a shallow fashion. This ensures we can access files from this branch, enabling later +// reading of files from this revision. +func (w *FileWorkspace) fetchBranch(log logging.SimpleLogging, cloneDir, branch string) (string, error) { + log.Debug("fetching branch %s into repository %s", branch, cloneDir) + + fetchCmd := exec.Command("git", "fetch", "--depth=1", "origin", fmt.Sprintf("+refs/heads/%s:refs/remotes/origin/%s", branch, branch)) + fetchCmd.Dir = cloneDir + output, err := fetchCmd.CombinedOutput() + if err != nil { + err = errors.Wrapf(err, "failed to fetch base branch %s: %s", branch, string(output)) } - // Otherwise we clone the repo. 
- return cloneDir, false, w.forceClone(log, cloneDir, headRepo, p) + return cloneDir, err } // warnDiverged returns true if we should warn the user that the branch we're diff --git a/server/events/working_dir_test.go b/server/events/working_dir_test.go index 79ee18de50..2ff08be81e 100644 --- a/server/events/working_dir_test.go +++ b/server/events/working_dir_test.go @@ -43,7 +43,7 @@ func TestClone_NoneExisting(t *testing.T) { cloneDir, _, err := wd.Clone(logging.NewNoopLogger(t), models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", - }, "default") + }, "default", []string{}) Ok(t, err) // Use rev-parse to verify at correct commit. @@ -92,15 +92,15 @@ func TestClone_CheckoutMergeNoneExisting(t *testing.T) { cloneDir, hasDiverged, err := wd.Clone(logging.NewNoopLogger(t), models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", - BaseBranch: "main", - }, "default") + BaseBranch: "master", + }, "default", []string{}) Ok(t, err) Equals(t, false, hasDiverged) // Check the commits. actBaseCommit := runCmd(t, cloneDir, "git", "rev-parse", "HEAD~1") actHeadCommit := runCmd(t, cloneDir, "git", "rev-parse", "HEAD^2") - Equals(t, mainCommit, actBaseCommit) + Equals(t, masterCommit, actBaseCommit) Equals(t, branchCommit, actHeadCommit) // Use ls to verify the repo looks good. @@ -112,19 +112,20 @@ func TestClone_CheckoutMergeNoneExisting(t *testing.T) { // the right commit, then we don't reclone. func TestClone_CheckoutMergeNoReclone(t *testing.T) { // Initialize the git repo. - repoDir := initRepo(t) + repoDir, cleanup := initRepo(t) + defer cleanup() - // Add a commit to branch 'branch' that's not on main. + // Add a commit to branch 'branch' that's not on master. 
runCmd(t, repoDir, "git", "checkout", "branch") runCmd(t, repoDir, "touch", "branch-file") runCmd(t, repoDir, "git", "add", "branch-file") runCmd(t, repoDir, "git", "commit", "-m", "branch-commit") - // Now switch back to main and advance the main branch by another commit. - runCmd(t, repoDir, "git", "checkout", "main") - runCmd(t, repoDir, "touch", "main-file") - runCmd(t, repoDir, "git", "add", "main-file") - runCmd(t, repoDir, "git", "commit", "-m", "main-commit") + // Now switch back to master and advance the master branch by another commit. + runCmd(t, repoDir, "git", "checkout", "master") + runCmd(t, repoDir, "touch", "master-file") + runCmd(t, repoDir, "git", "add", "master-file") + runCmd(t, repoDir, "git", "commit", "-m", "master-commit") // Run the clone for the first time. dataDir := t.TempDir() @@ -140,8 +141,8 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) { _, hasDiverged, err := wd.Clone(logging.NewNoopLogger(t), models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", - BaseBranch: "main", - }, "default") + BaseBranch: "master", + }, "default", []string{}) Ok(t, err) Equals(t, false, hasDiverged) @@ -152,8 +153,8 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) { cloneDir, hasDiverged, err := wd.Clone(logging.NewNoopLogger(t), models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", - BaseBranch: "main", - }, "default") + BaseBranch: "master", + }, "default", []string{}) Ok(t, err) Equals(t, false, hasDiverged) @@ -189,8 +190,8 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) { _, hasDiverged, err := wd.Clone(logging.NewNoopLogger(t), models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", - BaseBranch: "main", - }, "default") + BaseBranch: "master", + }, "default", []string{}) Ok(t, err) Equals(t, false, hasDiverged) @@ -201,8 +202,8 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) { cloneDir, hasDiverged, err := 
wd.Clone(logging.NewNoopLogger(t), models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", - BaseBranch: "main", - }, "default") + BaseBranch: "master", + }, "default", []string{}) Ok(t, err) Equals(t, false, hasDiverged) @@ -243,8 +244,8 @@ func TestClone_CheckoutMergeConflict(t *testing.T) { _, _, err := wd.Clone(logging.NewNoopLogger(t), models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", - BaseBranch: "main", - }, "default") + BaseBranch: "master", + }, "default", []string{}) ErrContains(t, "running git merge -q --no-ff -m atlantis-merge FETCH_HEAD", err) ErrContains(t, "Auto-merging file", err) @@ -274,7 +275,7 @@ func TestClone_NoReclone(t *testing.T) { cloneDir, hasDiverged, err := wd.Clone(logging.NewNoopLogger(t), models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", - }, "default") + }, "default", []string{}) Ok(t, err) Equals(t, false, hasDiverged) @@ -310,7 +311,7 @@ func TestClone_RecloneWrongCommit(t *testing.T) { BaseRepo: models.Repo{}, HeadBranch: "branch", HeadCommit: expCommit, - }, "default") + }, "default", []string{}) Ok(t, err) Equals(t, false, hasDiverged) @@ -377,8 +378,8 @@ func TestClone_MasterHasDiverged(t *testing.T) { _, hasDiverged, err := wd.Clone(logging.NewNoopLogger(t), models.Repo{CloneURL: repoDir}, models.PullRequest{ BaseRepo: models.Repo{CloneURL: repoDir}, HeadBranch: "second-pr", - BaseBranch: "main", - }, "default") + BaseBranch: "master", + }, "default", []string{}) Ok(t, err) Equals(t, hasDiverged, true) @@ -388,8 +389,8 @@ func TestClone_MasterHasDiverged(t *testing.T) { _, hasDiverged, err = wd.Clone(logging.NewNoopLogger(t), models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "second-pr", - BaseBranch: "main", - }, "default") + BaseBranch: "master", + }, "default", []string{}) Ok(t, err) Equals(t, hasDiverged, false) } From e9f43786a42f897bd4728822951a1ab91c1810de Mon Sep 17 00:00:00 2001 From: Dragos Dumitrache 
Date: Mon, 30 Jan 2023 16:23:54 +0000 Subject: [PATCH 2/7] Execute against pull_request_target (#65) This ensures we're not running our CI against the head of the repository branch Also enable workflow_dispatch --- .circleci/config.yml | 30 ------------------------------ .github/workflows/test.yml | 10 ++++++---- 2 files changed, 6 insertions(+), 34 deletions(-) delete mode 100644 .circleci/config.yml diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 784adc6896..0000000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,30 +0,0 @@ -version: 2 -jobs: - e2e: - docker: - - image: cimg/go:1.19 # If you update this, update it in the Makefile too - environment: - # This version of TF will be downloaded before Atlantis is started. - # We do this instead of setting --default-tf-version because setting - # that flag starts the download asynchronously so we'd have a race - # condition. - # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp - TERRAFORM_VERSION: 1.3.7 - steps: - - checkout - - run: make build-service - # We don't run e2e tests on fork PRs because they don't have access to the secret env vars. - - run: if [ -z "${CIRCLE_PR_REPONAME}" ]; then ./scripts/e2e.sh; fi - -workflows: - version: 2 - branch: - jobs: - - e2e: - context: - - atlantis-e2e-tests - filters: - branches: - # Ignore fork PRs since they don't have access to - # the atlantis-e2e-tests context (and also doc PRs). 
- ignore: /(pull\/\d+)|(docs\/.*)/ diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 62d22350bb..a40b51a559 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -7,7 +7,7 @@ on: paths: - '**/*.go' - '.github/workflows/test.yml' - pull_request: + pull_request_target: types: - opened - reopened @@ -18,14 +18,16 @@ on: paths: - '**/*.go' - '.github/workflows/test.yml' - + workflow_dispatch: + + concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true jobs: test: - if: github.event.pull_request.draft == false + if: github.event.pull_request_target.draft == false name: runner / gotest runs-on: ubuntu-22.04 container: ghcr.io/runatlantis/testing-env:latest @@ -38,7 +40,7 @@ jobs: # This job builds the website, starts a server to serve it, and then uses # muffet (https://github.com/raviqqe/muffet) to perform the link check. website_link_check: - if: github.event.pull_request.draft == false + if: github.event.pull_request_target.draft == false runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v3 From 75794dbda4e54b0da68556a8f9113b6661c6bd19 Mon Sep 17 00:00:00 2001 From: Dragos Dumitrache Date: Tue, 31 Jan 2023 10:59:15 +0000 Subject: [PATCH 3/7] Execute against pull_request (#66) * Fix failing tests * Regenerate mocks * Use pull request, not pull request target --- .github/workflows/test.yml | 6 ++--- server/events/github_app_working_dir_test.go | 4 ++-- server/events/matchers/slice_of_string.go | 15 ++++++++++-- server/events/mock_workingdir_test.go | 18 +++++++-------- server/events/mocks/mock_working_dir.go | 14 +++++------ ...post_workflow_hooks_command_runner_test.go | 12 +++++----- .../pre_workflow_hooks_command_runner_test.go | 12 +++++----- .../project_command_builder_internal_test.go | 6 ++--- server/events/project_command_builder_test.go | 4 ++-- server/events/project_command_runner_test.go | 1 + server/events/working_dir_test.go | 23 +++++++++---------- 11 
files changed, 63 insertions(+), 52 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index a40b51a559..f9f7d04678 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -7,7 +7,7 @@ on: paths: - '**/*.go' - '.github/workflows/test.yml' - pull_request_target: + pull_request: types: - opened - reopened @@ -27,7 +27,7 @@ concurrency: jobs: test: - if: github.event.pull_request_target.draft == false + if: github.event.pull_request.draft == false name: runner / gotest runs-on: ubuntu-22.04 container: ghcr.io/runatlantis/testing-env:latest @@ -40,7 +40,7 @@ jobs: # This job builds the website, starts a server to serve it, and then uses # muffet (https://github.com/raviqqe/muffet) to perform the link check. website_link_check: - if: github.event.pull_request_target.draft == false + if: github.event.pull_request.draft == false runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v3 diff --git a/server/events/github_app_working_dir_test.go b/server/events/github_app_working_dir_test.go index 1478de27c2..088ff306b0 100644 --- a/server/events/github_app_working_dir_test.go +++ b/server/events/github_app_working_dir_test.go @@ -86,11 +86,11 @@ func TestClone_GithubAppSetsCorrectUrl(t *testing.T) { modifiedBaseRepo.SanitizedCloneURL = "https://x-access-token:@github.com/runatlantis/atlantis.git" When(credentials.GetToken()).ThenReturn("token", nil) - When(workingDir.Clone(logger, modifiedBaseRepo, models.PullRequest{BaseRepo: modifiedBaseRepo}, "default")).ThenReturn( + When(workingDir.Clone(logger, modifiedBaseRepo, models.PullRequest{BaseRepo: modifiedBaseRepo}, "default", []string{})).ThenReturn( "", true, nil, ) - _, success, _ := ghAppWorkingDir.Clone(logger, headRepo, models.PullRequest{BaseRepo: baseRepo}, "default") + _, success, _ := ghAppWorkingDir.Clone(logger, headRepo, models.PullRequest{BaseRepo: baseRepo}, "default", []string{}) Assert(t, success == true, "clone url mutation error") } diff --git 
a/server/events/matchers/slice_of_string.go b/server/events/matchers/slice_of_string.go index 96f9b24ae2..f9281819dd 100644 --- a/server/events/matchers/slice_of_string.go +++ b/server/events/matchers/slice_of_string.go @@ -2,9 +2,8 @@ package matchers import ( - "reflect" "github.com/petergtz/pegomock" - + "reflect" ) func AnySliceOfString() []string { @@ -18,3 +17,15 @@ func EqSliceOfString(value []string) []string { var nullValue []string return nullValue } + +func NotEqSliceOfString(value []string) []string { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue []string + return nullValue +} + +func SliceOfStringThat(matcher pegomock.ArgumentMatcher) []string { + pegomock.RegisterMatcher(matcher) + var nullValue []string + return nullValue +} diff --git a/server/events/mock_workingdir_test.go b/server/events/mock_workingdir_test.go index c18703f57b..04bbab0e3c 100644 --- a/server/events/mock_workingdir_test.go +++ b/server/events/mock_workingdir_test.go @@ -26,11 +26,11 @@ func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir { func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockWorkingDir) Clone(_param0 logging.SimpleLogging, _param1 models.Repo, _param2 models.PullRequest, _param3 string, _additionalBranches []string) (string, bool, error) { +func (mock *MockWorkingDir) Clone(_param0 logging.SimpleLogging, _param1 models.Repo, _param2 models.PullRequest, _param3 string, _param4 []string) (string, bool, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{_param0, _param1, _param2, _param3, _additionalBranches} + params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4} result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string var ret1 bool @@ -169,8 +169,8 @@ type VerifierMockWorkingDir struct { timeout time.Duration } -func (verifier *VerifierMockWorkingDir) Clone(_param0 logging.SimpleLogging, _param1 models.Repo, _param2 models.PullRequest, _param3 string, _additionalBranches []string) *MockWorkingDir_Clone_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2, _param3, _additionalBranches} +func (verifier *VerifierMockWorkingDir) Clone(_param0 logging.SimpleLogging, _param1 models.Repo, _param2 models.PullRequest, _param3 string, _param4 []string) *MockWorkingDir_Clone_OngoingVerification { + params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clone", params, verifier.timeout) return &MockWorkingDir_Clone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -181,11 +181,11 @@ type MockWorkingDir_Clone_OngoingVerification struct { } func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string, []string) { - _param0, _param1, _param2, _param3, _additionalBranches := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1], _additionalBranches[len(_additionalBranches)-1] + _param0, _param1, _param2, _param3, _param4 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1], 
_param2[len(_param2)-1], _param3[len(_param3)-1], _param4[len(_param4)-1] } -func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string, _additionalBranches [][]string) { +func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string, _param4 [][]string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) @@ -204,9 +204,9 @@ func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_p for u, param := range params[3] { _param3[u] = param.(string) } - _additionalBranches = make([][]string, len(params[4])) + _param4 = make([][]string, len(c.methodInvocations)) for u, param := range params[4] { - _additionalBranches[u] = param.([]string) + _param4[u] = param.([]string) } } return diff --git a/server/events/mocks/mock_working_dir.go b/server/events/mocks/mock_working_dir.go index 566510af02..3c1da85f80 100644 --- a/server/events/mocks/mock_working_dir.go +++ b/server/events/mocks/mock_working_dir.go @@ -26,11 +26,11 @@ func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir { func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockWorkingDir) Clone(_param0 logging.SimpleLogging, _param1 models.Repo, _param2 models.PullRequest, _param3 string, _additionalBranches []string) (string, bool, error) { +func (mock *MockWorkingDir) Clone(_param0 logging.SimpleLogging, _param1 models.Repo, _param2 models.PullRequest, _param3 string, _param4 []string) (string, bool, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{_param0, _param1, _param2, _param3, _additionalBranches} + params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4} result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string var ret1 bool @@ -169,8 +169,8 @@ type VerifierMockWorkingDir struct { timeout time.Duration } -func (verifier *VerifierMockWorkingDir) Clone(_param0 logging.SimpleLogging, _param1 models.Repo, _param2 models.PullRequest, _param3 string, _additionalBranches []string) *MockWorkingDir_Clone_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2, _param3, _additionalBranches} +func (verifier *VerifierMockWorkingDir) Clone(_param0 logging.SimpleLogging, _param1 models.Repo, _param2 models.PullRequest, _param3 string, _param4 []string) *MockWorkingDir_Clone_OngoingVerification { + params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clone", params, verifier.timeout) return &MockWorkingDir_Clone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -181,8 +181,8 @@ type MockWorkingDir_Clone_OngoingVerification struct { } func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string, []string) { - _param0, _param1, _param2, _param3, _additionalBranches := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1], _additionalBranches[len(_additionalBranches)-1] + _param0, _param1, _param2, _param3, _param4 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], 
_param3[len(_param3)-1], _param4[len(_param4)-1] } func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string, _param4 [][]string) { @@ -204,7 +204,7 @@ func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_p for u, param := range params[3] { _param3[u] = param.(string) } - _param4 = make([][]string, len(params[4])) + _param4 = make([][]string, len(c.methodInvocations)) for u, param := range params[4] { _param4[u] = param.([]string) } diff --git a/server/events/post_workflow_hooks_command_runner_test.go b/server/events/post_workflow_hooks_command_runner_test.go index 1a722666f8..ab3b8d02e7 100644 --- a/server/events/post_workflow_hooks_command_runner_test.go +++ b/server/events/post_workflow_hooks_command_runner_test.go @@ -100,7 +100,7 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(postWhWorkingDir.Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace, []string{})).ThenReturn(repoDir, false, nil) When(whPostWorkflowHookRunner.Run(pCtx, testHook.RunCommand, repoDir)).ThenReturn(result, runtimeDesc, nil) err := postWh.RunPostHooks(ctx, nil) @@ -136,7 +136,7 @@ func TestRunPostHooks_Clone(t *testing.T) { whPostWorkflowHookRunner.VerifyWasCalled(Never()).Run(pCtx, testHook.RunCommand, repoDir) postWhWorkingDirLocker.VerifyWasCalled(Never()).TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, "path") - postWhWorkingDir.VerifyWasCalled(Never()).Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace) + postWhWorkingDir.VerifyWasCalled(Never()).Clone(log, 
testdata.GithubRepo, newPull, events.DefaultWorkspace, []string{}) }) t.Run("error locking work dir", func(t *testing.T) { postWorkflowHooksSetup(t) @@ -159,7 +159,7 @@ func TestRunPostHooks_Clone(t *testing.T) { err := postWh.RunPostHooks(ctx, nil) Assert(t, err != nil, "error not nil") - postWhWorkingDir.VerifyWasCalled(Never()).Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace) + postWhWorkingDir.VerifyWasCalled(Never()).Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace, []string{}) whPostWorkflowHookRunner.VerifyWasCalled(Never()).Run(pCtx, testHook.RunCommand, repoDir) }) @@ -185,7 +185,7 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, errors.New("some error")) + When(postWhWorkingDir.Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace, []string{})).ThenReturn(repoDir, false, errors.New("some error")) err := postWh.RunPostHooks(ctx, nil) @@ -217,7 +217,7 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(postWhWorkingDir.Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace, []string{})).ThenReturn(repoDir, false, nil) When(whPostWorkflowHookRunner.Run(runtimematchers.AnyModelsWorkflowHookCommandContext(), EqString(testHook.RunCommand), EqString(repoDir))).ThenReturn(result, runtimeDesc, errors.New("some error")) err := postWh.RunPostHooks(ctx, nil) @@ -255,7 +255,7 @@ func TestRunPostHooks_Clone(t 
*testing.T) { postWh.GlobalCfg = globalCfg When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(postWhWorkingDir.Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace, []string{})).ThenReturn(repoDir, false, nil) When(whPostWorkflowHookRunner.Run(runtimematchers.AnyModelsWorkflowHookCommandContext(), EqString(testHook.RunCommand), EqString(repoDir))).ThenReturn(result, runtimeDesc, nil) err := postWh.RunPostHooks(ctx, cmd) diff --git a/server/events/pre_workflow_hooks_command_runner_test.go b/server/events/pre_workflow_hooks_command_runner_test.go index ee1944ba1d..e94214bc1d 100644 --- a/server/events/pre_workflow_hooks_command_runner_test.go +++ b/server/events/pre_workflow_hooks_command_runner_test.go @@ -101,7 +101,7 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(preWhWorkingDir.Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace, []string{})).ThenReturn(repoDir, false, nil) When(whPreWorkflowHookRunner.Run(runtimematchers.AnyModelsWorkflowHookCommandContext(), EqString(testHook.RunCommand), EqString(repoDir))).ThenReturn(result, runtimeDesc, nil) err := preWh.RunPreHooks(ctx, nil) @@ -138,7 +138,7 @@ func TestRunPreHooks_Clone(t *testing.T) { whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(runtimematchers.AnyModelsWorkflowHookCommandContext(), EqString(testHook.RunCommand), EqString(repoDir)) preWhWorkingDirLocker.VerifyWasCalled(Never()).TryLock(testdata.GithubRepo.FullName, newPull.Num, 
events.DefaultWorkspace, "") - preWhWorkingDir.VerifyWasCalled(Never()).Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace) + preWhWorkingDir.VerifyWasCalled(Never()).Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace, []string{}) }) t.Run("error locking work dir", func(t *testing.T) { @@ -162,7 +162,7 @@ func TestRunPreHooks_Clone(t *testing.T) { err := preWh.RunPreHooks(ctx, nil) Assert(t, err != nil, "error not nil") - preWhWorkingDir.VerifyWasCalled(Never()).Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace) + preWhWorkingDir.VerifyWasCalled(Never()).Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace, []string{}) whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(runtimematchers.AnyModelsWorkflowHookCommandContext(), EqString(testHook.RunCommand), EqString(repoDir)) }) @@ -188,7 +188,7 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, errors.New("some error")) + When(preWhWorkingDir.Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace, []string{})).ThenReturn(repoDir, false, errors.New("some error")) err := preWh.RunPreHooks(ctx, nil) @@ -220,7 +220,7 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(preWhWorkingDir.Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace, []string{})).ThenReturn(repoDir, false, nil) 
When(whPreWorkflowHookRunner.Run(runtimematchers.AnyModelsWorkflowHookCommandContext(), EqString(testHook.RunCommand), EqString(repoDir))).ThenReturn(result, runtimeDesc, errors.New("some error")) err := preWh.RunPreHooks(ctx, nil) @@ -258,7 +258,7 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(preWhWorkingDir.Clone(log, testdata.GithubRepo, newPull, events.DefaultWorkspace, []string{})).ThenReturn(repoDir, false, nil) When(whPreWorkflowHookRunner.Run(runtimematchers.AnyModelsWorkflowHookCommandContext(), EqString(testHook.RunCommand), EqString(repoDir))).ThenReturn(result, runtimeDesc, nil) err := preWh.RunPreHooks(ctx, cmd) diff --git a/server/events/project_command_builder_internal_test.go b/server/events/project_command_builder_internal_test.go index 50b460d0f8..b0da90f454 100644 --- a/server/events/project_command_builder_internal_test.go +++ b/server/events/project_command_builder_internal_test.go @@ -837,7 +837,7 @@ projects: }) workingDir := NewMockWorkingDir() - When(workingDir.Clone(logging_matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmp, false, nil) + When(workingDir.Clone(logging_matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString(), AnyStringSlice())).ThenReturn(tmp, false, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"modules/module/main.tf"}, nil) @@ -1073,7 +1073,7 @@ workflows: }) workingDir := NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), 
matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmp, false, nil) + When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString(), AnyStringSlice())).ThenReturn(tmp, false, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"modules/module/main.tf"}, nil) @@ -1225,7 +1225,7 @@ projects: }) workingDir := NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmp, false, nil) + When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString(), AnyStringSlice())).ThenReturn(tmp, false, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"modules/module/main.tf"}, nil) diff --git a/server/events/project_command_builder_test.go b/server/events/project_command_builder_test.go index 3ad252a090..15e9eba550 100644 --- a/server/events/project_command_builder_test.go +++ b/server/events/project_command_builder_test.go @@ -605,7 +605,7 @@ projects: tmpDir := DirStructure(t, c.DirectoryStructure) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString(), AnyStringSlice())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), 
matchers.AnyModelsPullRequest())).ThenReturn(c.ModifiedFiles, nil) @@ -1396,7 +1396,7 @@ func TestDefaultProjectCommandBuilder_WithPolicyCheckEnabled_BuildAutoplanComman scope, _, _ := metrics.NewLoggingScope(logger, "atlantis") workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(matchers.AnyLoggingSimpleLogging(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString(), AnyStringSlice())).ThenReturn(tmpDir, false, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"main.tf"}, nil) diff --git a/server/events/project_command_runner_test.go b/server/events/project_command_runner_test.go index 87c0f8ae94..391cfd9015 100644 --- a/server/events/project_command_runner_test.go +++ b/server/events/project_command_runner_test.go @@ -722,6 +722,7 @@ func TestDefaultProjectCommandRunner_Import(t *testing.T) { matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString(), + AnyStringSlice(), )).ThenReturn(repoDir, false, nil) if c.setup != nil { c.setup(repoDir, ctx, mockLocker, mockInit, mockImport) diff --git a/server/events/working_dir_test.go b/server/events/working_dir_test.go index 2ff08be81e..cb98d2dea1 100644 --- a/server/events/working_dir_test.go +++ b/server/events/working_dir_test.go @@ -92,7 +92,7 @@ func TestClone_CheckoutMergeNoneExisting(t *testing.T) { cloneDir, hasDiverged, err := wd.Clone(logging.NewNoopLogger(t), models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", - BaseBranch: "master", + BaseBranch: "main", }, "default", []string{}) Ok(t, err) Equals(t, false, hasDiverged) @@ -100,7 +100,7 @@ func TestClone_CheckoutMergeNoneExisting(t *testing.T) { // Check the commits. 
actBaseCommit := runCmd(t, cloneDir, "git", "rev-parse", "HEAD~1") actHeadCommit := runCmd(t, cloneDir, "git", "rev-parse", "HEAD^2") - Equals(t, masterCommit, actBaseCommit) + Equals(t, mainCommit, actBaseCommit) Equals(t, branchCommit, actHeadCommit) // Use ls to verify the repo looks good. @@ -112,8 +112,7 @@ func TestClone_CheckoutMergeNoneExisting(t *testing.T) { // the right commit, then we don't reclone. func TestClone_CheckoutMergeNoReclone(t *testing.T) { // Initialize the git repo. - repoDir, cleanup := initRepo(t) - defer cleanup() + repoDir := initRepo(t) // Add a commit to branch 'branch' that's not on master. runCmd(t, repoDir, "git", "checkout", "branch") @@ -122,7 +121,7 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) { runCmd(t, repoDir, "git", "commit", "-m", "branch-commit") // Now switch back to master and advance the master branch by another commit. - runCmd(t, repoDir, "git", "checkout", "master") + runCmd(t, repoDir, "git", "checkout", "main") runCmd(t, repoDir, "touch", "master-file") runCmd(t, repoDir, "git", "add", "master-file") runCmd(t, repoDir, "git", "commit", "-m", "master-commit") @@ -141,7 +140,7 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) { _, hasDiverged, err := wd.Clone(logging.NewNoopLogger(t), models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", - BaseBranch: "master", + BaseBranch: "main", }, "default", []string{}) Ok(t, err) Equals(t, false, hasDiverged) @@ -153,7 +152,7 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) { cloneDir, hasDiverged, err := wd.Clone(logging.NewNoopLogger(t), models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", - BaseBranch: "master", + BaseBranch: "main", }, "default", []string{}) Ok(t, err) Equals(t, false, hasDiverged) @@ -190,7 +189,7 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) { _, hasDiverged, err := wd.Clone(logging.NewNoopLogger(t), models.Repo{}, models.PullRequest{ BaseRepo: 
models.Repo{}, HeadBranch: "branch", - BaseBranch: "master", + BaseBranch: "main", }, "default", []string{}) Ok(t, err) Equals(t, false, hasDiverged) @@ -202,7 +201,7 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) { cloneDir, hasDiverged, err := wd.Clone(logging.NewNoopLogger(t), models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", - BaseBranch: "master", + BaseBranch: "main", }, "default", []string{}) Ok(t, err) Equals(t, false, hasDiverged) @@ -244,7 +243,7 @@ func TestClone_CheckoutMergeConflict(t *testing.T) { _, _, err := wd.Clone(logging.NewNoopLogger(t), models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", - BaseBranch: "master", + BaseBranch: "main", }, "default", []string{}) ErrContains(t, "running git merge -q --no-ff -m atlantis-merge FETCH_HEAD", err) @@ -378,7 +377,7 @@ func TestClone_MasterHasDiverged(t *testing.T) { _, hasDiverged, err := wd.Clone(logging.NewNoopLogger(t), models.Repo{CloneURL: repoDir}, models.PullRequest{ BaseRepo: models.Repo{CloneURL: repoDir}, HeadBranch: "second-pr", - BaseBranch: "master", + BaseBranch: "main", }, "default", []string{}) Ok(t, err) Equals(t, hasDiverged, true) @@ -389,7 +388,7 @@ func TestClone_MasterHasDiverged(t *testing.T) { _, hasDiverged, err = wd.Clone(logging.NewNoopLogger(t), models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "second-pr", - BaseBranch: "master", + BaseBranch: "main", }, "default", []string{}) Ok(t, err) Equals(t, hasDiverged, false) From 1fce57df5d6b51c97c7177787587396d3c636ac6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ra=C3=BAl=20Naveiras?= Date: Fri, 26 May 2023 15:05:45 +0100 Subject: [PATCH 4/7] Update terraform version Removed versions that we aren't use, and the last minor release for each major release. 
``` $ cat atlantis.yaml|grep terraform_version |uniq|sort terraform_version: v0.11.15 terraform_version: v0.13.7 terraform_version: v1.3.7 ``` --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 9415dd3519..9a60212af7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -29,11 +29,11 @@ ARG TARGETPLATFORM # install terraform binaries # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp -ENV DEFAULT_TERRAFORM_VERSION=1.3.7 +ENV DEFAULT_TERRAFORM_VERSION=1.4.6 # In the official Atlantis image we only have the latest of each Terraform version. SHELL ["/bin/bash", "-o", "pipefail", "-c"] -RUN AVAILABLE_TERRAFORM_VERSIONS="0.8.8 0.9.11 0.10.8 0.11.14 0.11.15 0.12.29 0.12.31 0.13.2 0.13.7 1.0.11 1.1.9 1.2.9 ${DEFAULT_TERRAFORM_VERSION}" && \ +RUN AVAILABLE_TERRAFORM_VERSIONS="0.11.15 0.12.31 0.13.7 0.14.9 0.15.5 1.0.10 1.1.9 1.2.9 1.3.7 ${DEFAULT_TERRAFORM_VERSION}" && \ case "${TARGETPLATFORM}" in \ "linux/amd64") TERRAFORM_ARCH=amd64 ;; \ "linux/arm64") TERRAFORM_ARCH=arm64 ;; \ From 062e9fa2a920883a51316ac03c1aa9ca6a3ef395 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ra=C3=BAl=20Naveiras?= Date: Fri, 26 May 2023 15:20:38 +0100 Subject: [PATCH 5/7] Remove default terraform version This is the safest way to ensure that we're using the correct version of terraform when we're using utopia terraform Includes an update on conftest --- Dockerfile | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/Dockerfile b/Dockerfile index 9a60212af7..324962a122 100644 --- a/Dockerfile +++ b/Dockerfile @@ -29,11 +29,10 @@ ARG TARGETPLATFORM # install terraform binaries # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp -ENV DEFAULT_TERRAFORM_VERSION=1.4.6 # In the official Atlantis image we only have the latest of each Terraform version. 
SHELL ["/bin/bash", "-o", "pipefail", "-c"] -RUN AVAILABLE_TERRAFORM_VERSIONS="0.11.15 0.12.31 0.13.7 0.14.9 0.15.5 1.0.10 1.1.9 1.2.9 1.3.7 ${DEFAULT_TERRAFORM_VERSION}" && \ +RUN AVAILABLE_TERRAFORM_VERSIONS="0.11.15 0.12.31 0.13.7 0.14.9 0.15.5 1.0.10 1.1.9 1.2.9 1.3.7 1.4.6" && \ case "${TARGETPLATFORM}" in \ "linux/amd64") TERRAFORM_ARCH=amd64 ;; \ "linux/arm64") TERRAFORM_ARCH=arm64 ;; \ @@ -49,11 +48,10 @@ RUN AVAILABLE_TERRAFORM_VERSIONS="0.11.15 0.12.31 0.13.7 0.14.9 0.15.5 1.0.10 1. ln -s "/usr/local/bin/tf/versions/${VERSION}/terraform" "/usr/local/bin/terraform${VERSION}" && \ rm "terraform_${VERSION}_linux_${TERRAFORM_ARCH}.zip" && \ rm "terraform_${VERSION}_SHA256SUMS"; \ - done && \ - ln -s "/usr/local/bin/tf/versions/${DEFAULT_TERRAFORM_VERSION}/terraform" /usr/local/bin/terraform + done # renovate: datasource=github-releases depName=open-policy-agent/conftest -ENV DEFAULT_CONFTEST_VERSION=0.38.0 +ENV DEFAULT_CONFTEST_VERSION=0.42.1 RUN AVAILABLE_CONFTEST_VERSIONS="${DEFAULT_CONFTEST_VERSION}" && \ case "${TARGETPLATFORM}" in \ From 785834e42ad5648b0751dd6a62d40195f732d355 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ra=C3=BAl=20Naveiras?= Date: Wed, 31 May 2023 22:52:22 +0100 Subject: [PATCH 6/7] Revert "Remove default terraform version" --- Dockerfile | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 324962a122..9a60212af7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -29,10 +29,11 @@ ARG TARGETPLATFORM # install terraform binaries # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp +ENV DEFAULT_TERRAFORM_VERSION=1.4.6 # In the official Atlantis image we only have the latest of each Terraform version. 
SHELL ["/bin/bash", "-o", "pipefail", "-c"] -RUN AVAILABLE_TERRAFORM_VERSIONS="0.11.15 0.12.31 0.13.7 0.14.9 0.15.5 1.0.10 1.1.9 1.2.9 1.3.7 1.4.6" && \ +RUN AVAILABLE_TERRAFORM_VERSIONS="0.11.15 0.12.31 0.13.7 0.14.9 0.15.5 1.0.10 1.1.9 1.2.9 1.3.7 ${DEFAULT_TERRAFORM_VERSION}" && \ case "${TARGETPLATFORM}" in \ "linux/amd64") TERRAFORM_ARCH=amd64 ;; \ "linux/arm64") TERRAFORM_ARCH=arm64 ;; \ @@ -48,10 +49,11 @@ RUN AVAILABLE_TERRAFORM_VERSIONS="0.11.15 0.12.31 0.13.7 0.14.9 0.15.5 1.0.10 1. ln -s "/usr/local/bin/tf/versions/${VERSION}/terraform" "/usr/local/bin/terraform${VERSION}" && \ rm "terraform_${VERSION}_linux_${TERRAFORM_ARCH}.zip" && \ rm "terraform_${VERSION}_SHA256SUMS"; \ - done + done && \ + ln -s "/usr/local/bin/tf/versions/${DEFAULT_TERRAFORM_VERSION}/terraform" /usr/local/bin/terraform # renovate: datasource=github-releases depName=open-policy-agent/conftest -ENV DEFAULT_CONFTEST_VERSION=0.42.1 +ENV DEFAULT_CONFTEST_VERSION=0.38.0 RUN AVAILABLE_CONFTEST_VERSIONS="${DEFAULT_CONFTEST_VERSION}" && \ case "${TARGETPLATFORM}" in \ From 31ccb2b3e58187f30fcdcfbb1361e39498ccb73e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ra=C3=BAl=20Naveiras?= Date: Wed, 12 Jul 2023 08:24:37 +0100 Subject: [PATCH 7/7] Removed default terraform configuration Atlantis server requires a default tf version, however, this setting here also has the side effect to have a default version in utopia terraform. This is not safe, as you might updating the tf state version by mistake. 
Move this setting to the atlantis runtime config, so we check and run only the relevant terraform version in case of manual intervention --- Dockerfile | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/Dockerfile b/Dockerfile index 9a60212af7..b9312e4bb7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -27,13 +27,9 @@ FROM ${ATLANTIS_BASE}:${ATLANTIS_BASE_TAG_DATE}-${ATLANTIS_BASE_TAG_TYPE} AS bas # Get the architecture the image is being built for ARG TARGETPLATFORM -# install terraform binaries -# renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp -ENV DEFAULT_TERRAFORM_VERSION=1.4.6 - # In the official Atlantis image we only have the latest of each Terraform version. SHELL ["/bin/bash", "-o", "pipefail", "-c"] -RUN AVAILABLE_TERRAFORM_VERSIONS="0.11.15 0.12.31 0.13.7 0.14.9 0.15.5 1.0.10 1.1.9 1.2.9 1.3.7 ${DEFAULT_TERRAFORM_VERSION}" && \ +RUN AVAILABLE_TERRAFORM_VERSIONS="0.11.15 0.12.31 0.13.7 0.14.9 0.15.5 1.0.10 1.1.9 1.2.9 1.3.7 1.4.6 1.5.2" && \ case "${TARGETPLATFORM}" in \ "linux/amd64") TERRAFORM_ARCH=amd64 ;; \ "linux/arm64") TERRAFORM_ARCH=arm64 ;; \ @@ -49,11 +45,10 @@ RUN AVAILABLE_TERRAFORM_VERSIONS="0.11.15 0.12.31 0.13.7 0.14.9 0.15.5 1.0.10 1. ln -s "/usr/local/bin/tf/versions/${VERSION}/terraform" "/usr/local/bin/terraform${VERSION}" && \ rm "terraform_${VERSION}_linux_${TERRAFORM_ARCH}.zip" && \ rm "terraform_${VERSION}_SHA256SUMS"; \ - done && \ - ln -s "/usr/local/bin/tf/versions/${DEFAULT_TERRAFORM_VERSION}/terraform" /usr/local/bin/terraform + done # renovate: datasource=github-releases depName=open-policy-agent/conftest -ENV DEFAULT_CONFTEST_VERSION=0.38.0 +ENV DEFAULT_CONFTEST_VERSION=0.44.1 RUN AVAILABLE_CONFTEST_VERSIONS="${DEFAULT_CONFTEST_VERSION}" && \ case "${TARGETPLATFORM}" in \