diff --git a/.dockerignore b/.dockerignore index 72ec49f7c8..874eeae12f 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,3 +1,8 @@ * +!cmd/ +!server/ +!testdrive/ +!main.go +!go.mod +!go.sum !docker-entrypoint.sh -!atlantis diff --git a/.github/workflows/atlantis-base.yml b/.github/workflows/atlantis-base.yml index cece12ec7a..abfcce26b9 100644 --- a/.github/workflows/atlantis-base.yml +++ b/.github/workflows/atlantis-base.yml @@ -4,6 +4,7 @@ on: push: paths: - 'docker-base/**' + - '.github/workflows/atlantis-base.yml' branches: - "master" workflow_dispatch: @@ -17,6 +18,16 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + with: + image: tonistiigi/binfmt:latest + platforms: arm64,arm + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - name: Login to Packages Container registry uses: docker/login-action@v1 with: @@ -24,11 +35,13 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - run: echo "TODAY=$(date +"%Y.%m.%d")" >> $GITHUB_ENV - - name: build atlantis-base:${{env.TODAY}} image - run: | - docker build -t ghcr.io/runatlantis/atlantis-base:${{env.TODAY}} . - - name: publish atlantis-base:${{env.TODAY}} image - run: | - docker push ghcr.io/runatlantis/atlantis-base:${{env.TODAY}} - docker tag ghcr.io/runatlantis/atlantis-base:${{env.TODAY}} ghcr.io/runatlantis/atlantis-base:latest - docker push ghcr.io/runatlantis/atlantis-base:latest + + - name: Build and push atlantis-base:${{env.TODAY}} image + uses: docker/build-push-action@v2 + with: + context: docker-base + platforms: linux/arm64/v8,linux/amd64,linux/arm/v7 + push: true + tags: | + ghcr.io/runatlantis/atlantis-base:${{env.TODAY}} + ghcr.io/runatlantis/atlantis-base:latest diff --git a/.github/workflows/atlantis-image.yml b/.github/workflows/atlantis-image.yml index 6b769dd3aa..14830bc199 100644 --- a/.github/workflows/atlantis-image.yml +++ b/.github/workflows/atlantis-image.yml @@ -17,6 +17,16 @@ jobs: - uses: actions/setup-go@v2 with: go-version: 1.17 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + with: + image: tonistiigi/binfmt:latest + platforms: arm64,arm + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - name: Login to Packages Container registry uses: docker/login-action@v1 with: @@ -25,28 +35,27 @@ jobs: password: ${{ secrets.GITHUB_TOKEN }} # Publish dev image to container registry - - name: build atlantis:dev image + - name: Build and push atlantis:dev image if: ${{ github.event_name == 'push'}} - run: | - make build-service - docker build -t ghcr.io/runatlantis/atlantis:dev . - - name: publish atlantis:dev image - if: ${{ github.event_name == 'push'}} - run: | - docker push ghcr.io/runatlantis/atlantis:dev + uses: docker/build-push-action@v2 + with: + context: . + platforms: linux/arm64/v8,linux/amd64,linux/arm/v7 + push: true + tags: | + ghcr.io/runatlantis/atlantis:dev # Publish release to container registry - name: populate release version if: ${{ github.event_name == 'release'}} run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV - - name: build atlantis:$RELEASE_VERSION release image + - name: Build and push atlantis:$RELEASE_VERSION image if: ${{ github.event_name == 'release'}} - run: | - make build-service - docker build -t ghcr.io/runatlantis/atlantis:$RELEASE_VERSION . 
- - name: publish atlantis:$RELEASE_VERSION release image - if: ${{ github.event_name == 'release'}} - run: | - docker push ghcr.io/runatlantis/atlantis:$RELEASE_VERSION - docker tag ghcr.io/runatlantis/atlantis:$RELEASE_VERSION ghcr.io/runatlantis/atlantis:latest - docker push ghcr.io/runatlantis/atlantis:latest + uses: docker/build-push-action@v2 + with: + context: . + platforms: linux/arm64/v8,linux/amd64,linux/arm/v7 + push: true + tags: | + ghcr.io/runatlantis/atlantis:$RELEASE_VERSION + ghcr.io/runatlantis/atlantis:latest diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 2cf420a1ef..29a5aab838 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -14,18 +14,18 @@ jobs: github_token: ${{ secrets.GITHUB_TOKEN }} tool_name: golangci-lint - # Use golint via golangci-lint binary with "warning" level. - golint: - name: runner / golint + # Use revive via golangci-lint binary with "warning" level. + revive: + name: runner / revive runs-on: ubuntu-latest steps: - name: Check out code into the Go module directory uses: actions/checkout@v1 - - name: golint + - name: revive uses: reviewdog/action-golangci-lint@v2 with: - golangci_lint_flags: "--disable-all -E golint" - tool_name: golint # Change reporter name. + golangci_lint_flags: "--disable-all -E revive" + tool_name: revive # Change reporter name. level: warning # GitHub Status Check won't become failure with this level. # You can add more and more supported linters with different config. diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000000..599f57a336 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,26 @@ +name: release + +on: + push: + tags: + - v*.*.* + +jobs: + goreleaser: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + submodules: true + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.17 + - name: Run GoReleaser + uses: goreleaser/goreleaser-action@v2 + with: + version: v0.183.0 + args: release --rm-dist + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/testing-env-image.yml b/.github/workflows/testing-env-image.yml index 41502d5352..290491b259 100644 --- a/.github/workflows/testing-env-image.yml +++ b/.github/workflows/testing-env-image.yml @@ -3,33 +3,41 @@ name: testing-env-image on: push: paths: - - 'testing/**' - - '.github/workflows/testing-env-image.yml' + - "testing/**" + - ".github/workflows/testing-env-image.yml" branches: - - 'master' + - "master" workflow_dispatch: -defaults: - run: - working-directory: testing - jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + with: + image: tonistiigi/binfmt:latest + platforms: arm64,arm + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - name: Login to Packages Container registry uses: docker/login-action@v1 with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} + - run: echo "TODAY=$(date +"%Y.%m.%d")" >> $GITHUB_ENV - - name: build testing-env:${{env.TODAY}} image - run: | - docker build -t ghcr.io/runatlantis/testing-env:${{env.TODAY}} . 
- - name: publish testing-env:${{env.TODAY}} image - run: | - docker push ghcr.io/runatlantis/testing-env:${{env.TODAY}} - docker tag ghcr.io/runatlantis/testing-env:${{env.TODAY}} ghcr.io/runatlantis/testing-env:latest - docker push ghcr.io/runatlantis/testing-env:latest + - name: Build and push testing-env:${{env.TODAY}} image + uses: docker/build-push-action@v2 + with: + context: testing + platforms: linux/arm64/v8,linux/amd64,linux/arm/v7 + push: true + tags: | + ghcr.io/runatlantis/testing-env:${{env.TODAY}} + ghcr.io/runatlantis/testing-env:latest diff --git a/.gitignore b/.gitignore index c23cbfed9c..fd0a0c95ac 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,7 @@ helm/test-values.yaml *.swp golangci-lint atlantis +.devcontainer + +# goreleaser +dist/ diff --git a/.golangci.yml b/.golangci.yml index 0630766f93..e403447ec5 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -6,7 +6,7 @@ linters: # We don't use goconst because it gives false positives in the tests. # - goconst - gofmt - - golint + - revive - gosec - gosimple - ineffassign diff --git a/.goreleaser.yml b/.goreleaser.yml new file mode 100644 index 0000000000..248d424d80 --- /dev/null +++ b/.goreleaser.yml @@ -0,0 +1,34 @@ +env: + - CGO_ENABLED=0 +builds: + - targets: + - darwin_amd64 + - darwin_arm64 + - linux_386 + - linux_amd64 + - linux_arm + - linux_arm64 + - windows_386 + - windows_amd64 + +archives: + - id: zip + name_template: "{{ .ProjectName }}_{{ .Os }}_{{ .Arch }}" + format: zip + files: + - none* + +checksum: + name_template: 'checksums.txt' + +changelog: + skip: true + +release: + github: + owner: runatlantis + name: atlantis + draft: true + +snapshot: + name_template: "{{ incpatch .Version }}-next" diff --git a/CHANGELOG.md b/CHANGELOG.md index c6ca34ecfa..db6a03a50a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,86 @@ +# v0.18.0 + +Feature release adding the capability to stream terraform logs in real time, along with support for tf 1.0.x (which was missed in the v0.17.6 release).
+ +## What's Changed + +* deps: terraform 1.1.2 by @chenrui333 in https://github.com/runatlantis/atlantis/pull/1952 +* build(deps): bump github.com/spf13/viper from 1.10.0 to 1.10.1 by @dependabot in https://github.com/runatlantis/atlantis/pull/1956 +* Dockerfile: Add support for last Terraform 1.0.x version in AVAILABLE_TERRAFORM_VERSIONS by @javierbeaumont in https://github.com/runatlantis/atlantis/pull/1957 +* feat: add GitHub team allowlist configuration option by @paulerickson in https://github.com/runatlantis/atlantis/pull/1694 +* fix: fallback to default TF version in apply step by @sapslaj in https://github.com/runatlantis/atlantis/pull/1931 +* docs: typo in heading level by @moretea in https://github.com/runatlantis/atlantis/pull/1960 +* docs: clarify example for `--azuredevops-token` flag by @MarkIannucci in https://github.com/runatlantis/atlantis/pull/1712 +* docs: update github docs links by @chenrui333 in https://github.com/runatlantis/atlantis/pull/1964 +* build(deps): bump github.com/hashicorp/go-getter from 1.5.9 to 1.5.10 by @dependabot in https://github.com/runatlantis/atlantis/pull/1961 +* feat: streaming terraform logs in real-time by @Aayyush in https://github.com/runatlantis/atlantis/pull/1937 + +# v0.17.6 + +## What's Changed + +* docs: clarify maximum version limit by @tomharrisonjr in https://github.com/runatlantis/atlantis/pull/1894 +* fix: allow requests to /healthz without authentication by @wendtek in https://github.com/runatlantis/atlantis/pull/1896 +* docs: document approve_policies command in comment_parser by @dupuy26 in https://github.com/runatlantis/atlantis/pull/1886 +* feat: adds `allowed_regexp_prefixes` parameter to use with the `--enable-regexp-cmd` flag by @bmbferreira in https://github.com/runatlantis/atlantis/pull/1884 +* refactor: Add PullStatusFetcher interface by @nishkrishnan in https://github.com/runatlantis/atlantis/pull/1904 +* build(deps): bump github.com/urfave/negroni from 0.3.0 to 1.0.0 by @dependabot in https://github.com/runatlantis/atlantis/pull/1922 +* build(deps): bump github.com/xanzy/go-gitlab from 0.51.1 to 0.52.2 by @dependabot in https://github.com/runatlantis/atlantis/pull/1921 +* build(deps): bump github.com/golang-jwt/jwt/v4 from 4.1.0 to 4.2.0 by @dependabot in https://github.com/runatlantis/atlantis/pull/1928 +* docs: add clarity and further policy_check examples by @DaveHewy in https://github.com/runatlantis/atlantis/pull/1925 +* build(deps): bump github.com/spf13/viper from 1.9.0 to 1.10.0 by @dependabot in https://github.com/runatlantis/atlantis/pull/1934 +* deps: terraform 1.1.1 by @chenrui333 in https://github.com/runatlantis/atlantis/pull/1939 +* deps: alpine 3.15 by @chenrui333 in https://github.com/runatlantis/atlantis/pull/1941 +* docs: fix policy check documentation examples by @DaveHewy in https://github.com/runatlantis/atlantis/pull/1945 +* docker: make multi-platform atlantis image by @chenrui333 in https://github.com/runatlantis/atlantis/pull/1943 + +# v0.17.5 + +## What's Changed + +* refactor: move from io/ioutil to io and os package by @Juneezee in https://github.com/runatlantis/atlantis/pull/1843 +* chore: use golang-jwt/jwt to replace dgrijalva/jwt-go by @barn in https://github.com/runatlantis/atlantis/pull/1845 +* fix(azure): allow host to be specified in user_config for on premise installation by @dandcg in https://github.com/runatlantis/atlantis/pull/1860 +* feat: filter out atlantis/apply from 
mergeability clause by @nishkrishnan in https://github.com/runatlantis/atlantis/pull/1856 +* feat: add BasicAuth Support to Atlantis ServeHTTP by @fblgit in https://github.com/runatlantis/atlantis/pull/1777 +* fix(azure): allow correct path to be derived for on premise installation by @dandcg in https://github.com/runatlantis/atlantis/pull/1863 +* feat: add new bitbucket server webhook event type pr:from_ref_updated(#198) by @kuzm1ch in https://github.com/runatlantis/atlantis/pull/1866 +* Move runtime common under existing runtime package. by @nishkrishnan in https://github.com/runatlantis/atlantis/pull/1875 +* feat: use goreleaser to replace the binary-release script by @chenrui333 in https://github.com/runatlantis/atlantis/pull/1873 + +# v0.17.4 + +## What's Changed + +* build(deps): bump tar from 4.4.15 to 4.4.19 by @dependabot in https://github.com/runatlantis/atlantis/pull/1783 +* build: tf 1.0.6 by @chenrui333 in https://github.com/runatlantis/atlantis/pull/1786 +* Bump testing image conftest version to 0.27 by @nishkrishnan in https://github.com/runatlantis/atlantis/pull/1787 +* Actually bump testing image conftest version to 0.27 by @nishkrishnan in https://github.com/runatlantis/atlantis/pull/1788 +* build: fix testing-env img process by @chenrui333 in https://github.com/runatlantis/atlantis/pull/1789 +* e2e: update dockerfile by @chenrui333 in https://github.com/runatlantis/atlantis/pull/1790 +* build(deps): bump runatlantis/atlantis-base from 2021.06.22 to 2021.08.31 by @dependabot in https://github.com/runatlantis/atlantis/pull/1794 +* build(deps): bump github.com/xanzy/go-gitlab from 0.50.3 to 0.50.4 by @dependabot in https://github.com/runatlantis/atlantis/pull/1795 +* fix a log error typo by @danpilch in https://github.com/runatlantis/atlantis/pull/1796 +* Set ParallelPolicyCheckEnabled to the same value as ParallelPlanEnabled by @msarvar in https://github.com/runatlantis/atlantis/pull/1802 +* docs: Add missing --silence-vcs-status-no-plans flag by @franklad in https://github.com/runatlantis/atlantis/pull/1803 +* build(lint): use revive instead of golint by @minamijoyo in https://github.com/runatlantis/atlantis/pull/1801 +* build(deps): bump github.com/hashicorp/go-getter from 1.5.7 to 1.5.8 by @dependabot in https://github.com/runatlantis/atlantis/pull/1807 +* build(deps): bump go.uber.org/zap from 1.19.0 to 1.19.1 by @dependabot in https://github.com/runatlantis/atlantis/pull/1808 +* docs: add missing the `branch` key in the reference for server side repo config by @minamijoyo in https://github.com/runatlantis/atlantis/pull/1784 +* build: tf 1.0.7 by @chenrui333 in https://github.com/runatlantis/atlantis/pull/1811 +* deps: conftest 0.28.0 by @chenrui333 in https://github.com/runatlantis/atlantis/pull/1819 +* deps: conftest 0.28.1 by @chenrui333 in https://github.com/runatlantis/atlantis/pull/1826 +* build(deps): bump prismjs from 1.24.0 to 1.25.0 by @dependabot in https://github.com/runatlantis/atlantis/pull/1823 +* Updating client interface and adding ApprovalStatus model by @Aayyush in https://github.com/runatlantis/atlantis/pull/1827 +* Fix title level by @xiao-pp in https://github.com/runatlantis/atlantis/pull/1822 +* build(deps): bump github.com/xanzy/go-gitlab from 0.50.4 to 0.51.1 by @dependabot in https://github.com/runatlantis/atlantis/pull/1831 +* Add support for deleting a branch on merge in BitBucket Server by @wpbeckwith in 
https://github.com/runatlantis/atlantis/pull/1792 +* deps: tf 1.0.8 by @chenrui333 in https://github.com/runatlantis/atlantis/pull/1837 +* build(deps): bump github.com/spf13/viper from 1.8.1 to 1.9.0 by @dependabot in https://github.com/runatlantis/atlantis/pull/1821 +* Document --auto-merge-disabled option by @dupuy26 in https://github.com/runatlantis/atlantis/pull/1838 +* testdrive: update terraformVersion by @chenrui333 in https://github.com/runatlantis/atlantis/pull/1839 +* Improve github pull request call retries by @aristocrates in https://github.com/runatlantis/atlantis/pull/1810 + # v0.17.3 Feature release with a number of improvements related to Gitlab support, a new command, better formatting etc. Some broken features have been fixed in along with some regressions. diff --git a/Dockerfile b/Dockerfile index 0d161ed2a1..4ea275b433 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,11 +1,19 @@ +# Stage 1: build artifact +FROM golang:1.17-alpine AS builder + +WORKDIR /app +COPY . /app +RUN CGO_ENABLED=0 go build -trimpath -ldflags "-s -w" -v -o atlantis . + +# Stage 2 # The runatlantis/atlantis-base is created by docker-base/Dockerfile. -FROM ghcr.io/runatlantis/atlantis-base:2021.06.22 +FROM ghcr.io/runatlantis/atlantis-base:2021.12.15 AS base # install terraform binaries -ENV DEFAULT_TERRAFORM_VERSION=1.0.6 +ENV DEFAULT_TERRAFORM_VERSION=1.1.2 # In the official Atlantis image we only have the latest of each Terraform version. -RUN AVAILABLE_TERRAFORM_VERSIONS="0.8.8 0.9.11 0.10.8 0.11.15 0.12.31 0.13.7 0.14.11 0.15.5 ${DEFAULT_TERRAFORM_VERSION}" && \ +RUN AVAILABLE_TERRAFORM_VERSIONS="0.8.8 0.9.11 0.10.8 0.11.15 0.12.31 0.13.7 0.14.11 0.15.5 1.0.11 ${DEFAULT_TERRAFORM_VERSION}" && \ for VERSION in ${AVAILABLE_TERRAFORM_VERSIONS}; do \ curl -LOs https://releases.hashicorp.com/terraform/${VERSION}/terraform_${VERSION}_linux_amd64.zip && \ curl -LOs https://releases.hashicorp.com/terraform/${VERSION}/terraform_${VERSION}_SHA256SUMS && \ @@ -18,7 +26,7 @@ RUN AVAILABLE_TERRAFORM_VERSIONS="0.8.8 0.9.11 0.10.8 0.11.15 0.12.31 0.13.7 0.1 done && \ ln -s /usr/local/bin/tf/versions/${DEFAULT_TERRAFORM_VERSION}/terraform /usr/local/bin/terraform -ENV DEFAULT_CONFTEST_VERSION=0.25.0 +ENV DEFAULT_CONFTEST_VERSION=0.28.3 RUN AVAILABLE_CONFTEST_VERSIONS="${DEFAULT_CONFTEST_VERSION}" && \ for VERSION in ${AVAILABLE_CONFTEST_VERSIONS}; do \ @@ -35,7 +43,7 @@ RUN AVAILABLE_CONFTEST_VERSIONS="${DEFAULT_CONFTEST_VERSION}" && \ RUN ln -s /usr/local/bin/cft/versions/${DEFAULT_CONFTEST_VERSION}/conftest /usr/local/bin/conftest # copy binary -COPY atlantis /usr/local/bin/atlantis +COPY --from=builder /app/atlantis /usr/local/bin/atlantis # copy docker entrypoint COPY docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh diff --git a/Dockerfile.dev b/Dockerfile.dev index 7726bb08d0..607274679e 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -1,6 +1,6 @@ FROM ghcr.io/runatlantis/atlantis:latest COPY atlantis /usr/local/bin/atlantis # TODO: remove this once we get this in the base image -ENV DEFAULT_CONFTEST_VERSION=0.27.0 +ENV DEFAULT_CONFTEST_VERSION=0.28.3 WORKDIR /atlantis/src diff --git a/cmd/server.go b/cmd/server.go index cd869dbd0f..40cb82613f 100644 --- a/cmd/server.go +++ b/cmd/server.go @@ -41,6 +41,7 @@ const ( ADWebhookUserFlag = "azuredevops-webhook-user" ADTokenFlag = "azuredevops-token" // nolint: gosec ADUserFlag = "azuredevops-user" + ADHostnameFlag = "azuredevops-hostname" AllowForkPRsFlag = "allow-fork-prs" AllowRepoConfigFlag = "allow-repo-config" 
AtlantisURLFlag = "atlantis-url" @@ -63,6 +64,7 @@ const ( EnableRegExpCmdFlag = "enable-regexp-cmd" EnableDiffMarkdownFormat = "enable-diff-markdown-format" GHHostnameFlag = "gh-hostname" + GHTeamAllowlistFlag = "gh-team-allowlist" GHTokenFlag = "gh-token" GHUserFlag = "gh-user" GHAppIDFlag = "gh-app-id" @@ -102,15 +104,20 @@ const ( TFEHostnameFlag = "tfe-hostname" TFETokenFlag = "tfe-token" WriteGitCredsFlag = "write-git-creds" + WebBasicAuthFlag = "web-basic-auth" + WebUsernameFlag = "web-username" + WebPasswordFlag = "web-password" // NOTE: Must manually set these as defaults in the setDefaults function. DefaultADBasicUser = "" DefaultADBasicPassword = "" + DefaultADHostname = "dev.azure.com" DefaultAutoplanFileList = "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl" DefaultCheckoutStrategy = "branch" DefaultBitbucketBaseURL = bitbucketcloud.BaseURL DefaultDataDir = "~/.atlantis" DefaultGHHostname = "github.com" + DefaultGHTeamAllowlist = "*:plan,*:apply" DefaultGitlabHostname = "gitlab.com" DefaultLogLevel = "info" DefaultParallelPoolSize = 15 @@ -118,6 +125,9 @@ const ( DefaultTFDownloadURL = "https://releases.hashicorp.com" DefaultTFEHostname = "app.terraform.io" DefaultVCSStatusName = "atlantis" + DefaultWebBasicAuth = false + DefaultWebUsername = "atlantis" + DefaultWebPassword = "atlantis" ) var stringFlags = map[string]stringFlag{ @@ -139,6 +149,10 @@ var stringFlags = map[string]stringFlag{ description: "Azure DevOps basic HTTP authentication username for inbound webhooks.", defaultValue: "", }, + ADHostnameFlag: { + description: "Azure DevOps hostname to support cloud and self hosted instances.", + defaultValue: "dev.azure.com", + }, AtlantisURLFlag: { description: "URL that Atlantis can be reached at. Defaults to http://$(hostname):$port where $port is from --" + PortFlag + ". Supports a base path ex. https://example.com/basepath.", }, @@ -187,6 +201,18 @@ var stringFlags = map[string]stringFlag{ description: "Hostname of your Github Enterprise installation. If using github.com, no need to set.", defaultValue: DefaultGHHostname, }, + GHTeamAllowlistFlag: { + description: "Comma separated list of key-value pairs representing the GitHub teams and the operations that " + + "the members of a particular team are allowed to perform. " + + "The format is {team}:{command},{team}:{command}. " + + "Valid values for 'command' are 'plan', 'apply' and '*', e.g. 'dev:plan,ops:apply,devops:*'" + + "This example gives the users from the 'dev' GitHub team the permissions to execute the 'plan' command, " + + "the 'ops' team the permissions to execute the 'apply' command, " + + "and allows the 'devops' team to perform any operation. 
If this argument is not provided, the default value (*:*) " + + "will be used and the default behavior will be to not check permissions " + + "and to allow users from any team to perform any operation.", + defaultValue: DefaultGHTeamAllowlist, + }, GHUserFlag: { description: "GitHub username of API user.", defaultValue: "", @@ -281,6 +307,14 @@ var stringFlags = map[string]stringFlag{ description: "Name used to identify Atlantis for pull request statuses.", defaultValue: DefaultVCSStatusName, }, + WebUsernameFlag: { + description: "Username used for Web Basic Authentication on Atlantis HTTP Middleware", + defaultValue: DefaultWebUsername, + }, + WebPasswordFlag: { + description: "Password used for Web Basic Authentication on Atlantis HTTP Middleware", + defaultValue: DefaultWebPassword, + }, } var boolFlags = map[string]boolFlag{ @@ -379,6 +413,10 @@ var boolFlags = map[string]boolFlag{ description: "Skips cloning the PR repo if there are no projects were changed in the PR.", defaultValue: false, }, + WebBasicAuthFlag: { + description: "Switches on or off the Basic Authentication on the HTTP Middleware interface", + defaultValue: DefaultWebBasicAuth, + }, } var intFlags = map[string]intFlag{ ParallelPoolSize: { @@ -589,6 +627,9 @@ func (s *ServerCmd) run() error { } func (s *ServerCmd) setDefaults(c *server.UserConfig) { + if c.AzureDevOpsHostname == "" { + c.AzureDevOpsHostname = DefaultADHostname + } if c.AutoplanFileList == "" { c.AutoplanFileList = DefaultAutoplanFileList } @@ -622,9 +663,18 @@ func (s *ServerCmd) setDefaults(c *server.UserConfig) { if c.VCSStatusName == "" { c.VCSStatusName = DefaultVCSStatusName } + if c.GithubTeamAllowlist == "" { + c.GithubTeamAllowlist = DefaultGHTeamAllowlist + } if c.TFEHostname == "" { c.TFEHostname = DefaultTFEHostname } + if c.WebUsername == "" { + c.WebUsername = DefaultWebUsername + } + if c.WebPassword == "" { + c.WebPassword = DefaultWebPassword + } } func (s *ServerCmd) validate(userConfig server.UserConfig) error { diff --git a/cmd/server_test.go b/cmd/server_test.go index 388c79541c..95d82aa31e 100644 --- a/cmd/server_test.go +++ b/cmd/server_test.go @@ -15,7 +15,6 @@ package cmd import ( "fmt" - "io/ioutil" "os" "path/filepath" "reflect" @@ -837,12 +836,12 @@ func setupWithDefaults(flags map[string]interface{}, t *testing.T) *cobra.Comman } func tempFile(t *testing.T, contents string) string { - f, err := ioutil.TempFile("", "") + f, err := os.CreateTemp("", "") Ok(t, err) newName := f.Name() + ".yaml" err = os.Rename(f.Name(), newName) Ok(t, err) - ioutil.WriteFile(newName, []byte(contents), 0600) // nolint: errcheck + os.WriteFile(newName, []byte(contents), 0600) // nolint: errcheck return newName } diff --git a/docker-base/Dockerfile b/docker-base/Dockerfile index 89313733c4..a8758499ba 100644 --- a/docker-base/Dockerfile +++ b/docker-base/Dockerfile @@ -1,7 +1,7 @@ # This Dockerfile builds our base image with gosu, dumb-init and the atlantis # user. We split this from the main Dockerfile because this base doesn't change # and also because it kept breaking the build due to flakiness. 
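The `--gh-team-allowlist` flag introduced in cmd/server.go earlier in this diff takes a comma-separated list of `{team}:{command}` pairs. A minimal sketch of how such a string could be parsed and enforced is shown below; it is illustrative only, the function names are hypothetical, and it is not the authorization code Atlantis itself uses.

```go
// Illustrative sketch only: parsing and checking a "{team}:{command}" allowlist
// such as "dev:plan,ops:apply,devops:*". Not the actual Atlantis implementation.
package main

import (
	"fmt"
	"strings"
)

// parseTeamAllowlist turns "team:command,team:command" into a lookup table.
func parseTeamAllowlist(raw string) map[string]map[string]bool {
	allowed := map[string]map[string]bool{}
	for _, pair := range strings.Split(raw, ",") {
		parts := strings.SplitN(strings.TrimSpace(pair), ":", 2)
		if len(parts) != 2 {
			continue // skip malformed entries
		}
		team, command := parts[0], parts[1]
		if allowed[team] == nil {
			allowed[team] = map[string]bool{}
		}
		allowed[team][command] = true
	}
	return allowed
}

// isCommandAllowed reports whether any of the user's teams permit the command.
// "*" acts as a wildcard for either the team or the command.
func isCommandAllowed(allowed map[string]map[string]bool, userTeams []string, command string) bool {
	for _, team := range append([]string{"*"}, userTeams...) {
		cmds, ok := allowed[team]
		if !ok {
			continue
		}
		if cmds["*"] || cmds[command] {
			return true
		}
	}
	return false
}

func main() {
	allowed := parseTeamAllowlist("dev:plan,ops:apply,devops:*")
	fmt.Println(isCommandAllowed(allowed, []string{"dev"}, "plan"))     // true
	fmt.Println(isCommandAllowed(allowed, []string{"dev"}, "apply"))    // false
	fmt.Println(isCommandAllowed(allowed, []string{"devops"}, "apply")) // true
}
```

With a default such as `"*:plan,*:apply"` from the diff, the wildcard team entry lets members of any team run plan and apply.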
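The new `--web-basic-auth`, `--web-username`, and `--web-password` flags gate the Atlantis web UI behind HTTP Basic Authentication. Below is a minimal, self-contained sketch of that kind of middleware using only the Go standard library; the names are hypothetical and this is not the middleware Atlantis actually registers.

```go
// Illustrative sketch: HTTP Basic Auth middleware driven by the kind of settings
// the web-basic-auth/web-username/web-password flags configure.
// Not the actual Atlantis middleware; names are hypothetical.
package main

import (
	"crypto/subtle"
	"fmt"
	"log"
	"net/http"
)

// basicAuth wraps next and, when enabled, rejects requests whose Basic Auth
// credentials do not match the configured username and password.
func basicAuth(enabled bool, username, password string, next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if !enabled {
			next.ServeHTTP(w, r)
			return
		}
		user, pass, ok := r.BasicAuth()
		if !ok ||
			subtle.ConstantTimeCompare([]byte(user), []byte(username)) != 1 ||
			subtle.ConstantTimeCompare([]byte(pass), []byte(password)) != 1 {
			w.Header().Set("WWW-Authenticate", `Basic realm="atlantis"`)
			http.Error(w, "unauthorized", http.StatusUnauthorized)
			return
		}
		next.ServeHTTP(w, r)
	})
}

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "hello")
	})
	// Defaults mirror DefaultWebUsername / DefaultWebPassword from the diff.
	log.Fatal(http.ListenAndServe(":4141", basicAuth(true, "atlantis", "atlantis", mux)))
}
```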
-FROM alpine:3.14 +FROM alpine:3.15 LABEL authors="Anubhav Mishra, Luke Kysow" # We use gosu to step down from root and run as the atlantis user so we need diff --git a/e2e/e2e.go b/e2e/e2e.go index ee44a2a73a..5addf721e0 100644 --- a/e2e/e2e.go +++ b/e2e/e2e.go @@ -15,7 +15,6 @@ package main import ( "fmt" - "io/ioutil" "log" "os" "os/exec" @@ -78,7 +77,7 @@ func (t *E2ETester) Start() (*E2EResult, error) { randomData := []byte(testFileData) filePath := fmt.Sprintf("%s/%s/%s", cloneDir, t.projectType.Name, testFileName) log.Printf("creating file to commit %q", filePath) - err := ioutil.WriteFile(filePath, randomData, 0644) + err := os.WriteFile(filePath, randomData, 0644) if err != nil { return e2eResult, fmt.Errorf("couldn't write file %s: %v", filePath, err) } diff --git a/go.mod b/go.mod index 3b72ef27c3..c67cda8439 100644 --- a/go.mod +++ b/go.mod @@ -5,7 +5,7 @@ go 1.17 replace google.golang.org/grpc => google.golang.org/grpc v1.29.1 require ( - cloud.google.com/go v0.81.0 // indirect + cloud.google.com/go v0.99.0 // indirect cloud.google.com/go/storage v1.10.0 // indirect github.com/Laisky/graphql v1.0.5 github.com/Masterminds/goutils v1.1.1 // indirect @@ -16,33 +16,32 @@ require ( github.com/aws/aws-sdk-go v1.31.15 // indirect github.com/aymerick/douceur v0.2.0 // indirect github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect - github.com/bradleyfalzon/ghinstallation v1.1.1 + github.com/bradleyfalzon/ghinstallation/v2 v2.0.3 github.com/briandowns/spinner v0.0.0-20170614154858-48dbb65d7bd5 github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc - github.com/dgrijalva/jwt-go v3.2.0+incompatible github.com/docker/docker v0.0.0-20180620051407-e2593239d949 github.com/elazarl/go-bindata-assetfs v1.0.1 - github.com/fatih/color v1.7.0 // indirect + github.com/fatih/color v1.13.0 // indirect github.com/flynn-archive/go-shlex v0.0.0-20150515145356-3f9db97f8568 - github.com/fsnotify/fsnotify v1.4.10-0.20200417215612-7f4cf4dd2b52 // indirect + github.com/fsnotify/fsnotify v1.5.1 // indirect github.com/go-ozzo/ozzo-validation v0.0.0-20170913164239-85dcd8368eba github.com/go-playground/locales v0.12.1 // indirect github.com/go-playground/universal-translator v0.16.0 // indirect - github.com/go-test/deep v1.0.7 - github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect + github.com/go-test/deep v1.0.8 + github.com/golang-jwt/jwt/v4 v4.2.0 + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.2 // indirect - github.com/google/go-cmp v0.5.5 // indirect - github.com/google/go-github/v29 v29.0.2 // indirect + github.com/google/go-cmp v0.5.6 // indirect github.com/google/go-github/v31 v31.0.0 - github.com/google/go-querystring v1.0.0 // indirect + github.com/google/go-querystring v1.1.0 // indirect github.com/google/uuid v1.1.2-0.20200519141726-cb32006e483f // indirect - github.com/googleapis/gax-go/v2 v2.0.5 // indirect + github.com/googleapis/gax-go/v2 v2.1.1 // indirect github.com/gorilla/css v1.0.0 // indirect github.com/gorilla/mux v1.8.0 - github.com/gorilla/websocket v1.4.2 // indirect + github.com/gorilla/websocket v1.4.2 github.com/hashicorp/go-cleanhttp v0.5.2 // indirect - github.com/hashicorp/go-getter v1.5.7 + github.com/hashicorp/go-getter v1.5.10 github.com/hashicorp/go-retryablehttp v0.6.8 // indirect github.com/hashicorp/go-safetemp v1.0.0 // indirect github.com/hashicorp/go-version v1.3.0 @@ -53,27 
+52,26 @@ require ( github.com/imdario/mergo v0.3.11 // indirect github.com/inconshreveable/mousetrap v1.0.0 // indirect github.com/jmespath/go-jmespath v0.3.1-0.20200310193758-2437e8417af5 // indirect - github.com/jstemmer/go-junit-report v0.9.1 // indirect github.com/klauspost/compress v1.11.2 // indirect github.com/leodido/go-urn v1.2.0 // indirect github.com/lusis/slack-test v0.0.0-20190426140909-c40012f20018 // indirect github.com/magiconair/properties v1.8.5 // indirect - github.com/mattn/go-colorable v0.0.9 // indirect - github.com/mattn/go-isatty v0.0.4 // indirect - github.com/mcdafydd/go-azuredevops v0.12.0 - github.com/microcosm-cc/bluemonday v1.0.15 + github.com/mattn/go-colorable v0.1.12 // indirect + github.com/mattn/go-isatty v0.0.14 // indirect + github.com/mcdafydd/go-azuredevops v0.12.1 + github.com/microcosm-cc/bluemonday v1.0.16 github.com/mitchellh/colorstring v0.0.0-20150917214807-8631ce90f286 github.com/mitchellh/copystructure v1.0.0 // indirect github.com/mitchellh/go-homedir v1.1.0 github.com/mitchellh/go-testing-interface v1.0.0 // indirect github.com/mitchellh/go-wordwrap v1.0.0 // indirect - github.com/mitchellh/mapstructure v1.4.1 // indirect + github.com/mitchellh/mapstructure v1.4.3 // indirect github.com/mitchellh/reflectwalk v1.0.0 // indirect github.com/mohae/deepcopy v0.0.0-20170603005431-491d3605edfb github.com/nlopes/slack v0.4.0 github.com/onsi/ginkgo v1.14.0 // indirect github.com/onsi/gomega v1.10.1 // indirect - github.com/pelletier/go-toml v1.9.3 // indirect + github.com/pelletier/go-toml v1.9.4 // indirect github.com/petergtz/pegomock v2.9.0+incompatible github.com/pkg/errors v0.9.1 github.com/pmezard/go-difflib v1.0.0 // indirect @@ -85,42 +83,45 @@ require ( github.com/shurcooL/sanitized_anchor_name v1.0.0 // indirect github.com/sirupsen/logrus v1.6.1-0.20200528085638-6699a89a232f // indirect github.com/spf13/afero v1.6.0 // indirect - github.com/spf13/cast v1.3.1 // indirect + github.com/spf13/cast v1.4.1 // indirect github.com/spf13/cobra v0.0.0-20170905172051-b78744579491 github.com/spf13/jwalterweatherman v1.1.0 // indirect github.com/spf13/pflag v1.0.5 - github.com/spf13/viper v1.8.1 + github.com/spf13/viper v1.10.1 github.com/stretchr/testify v1.7.0 github.com/subosito/gotenv v1.2.0 // indirect github.com/ulikunitz/xz v0.5.8 // indirect github.com/urfave/cli v1.22.5 - github.com/urfave/negroni v0.3.0 - github.com/xanzy/go-gitlab v0.50.3 + github.com/urfave/negroni v1.0.0 + github.com/xanzy/go-gitlab v0.52.2 github.com/zclconf/go-cty v1.5.1 // indirect go.etcd.io/bbolt v1.3.6 go.opencensus.io v0.23.0 // indirect go.uber.org/atomic v1.7.0 // indirect go.uber.org/multierr v1.6.0 // indirect - go.uber.org/zap v1.19.0 - golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 - golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 // indirect - golang.org/x/mod v0.4.2 // indirect - golang.org/x/net v0.0.0-20210614182718-04defd469f4e // indirect - golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602 // indirect - golang.org/x/sys v0.0.0-20210510120138-977fb7262007 // indirect - golang.org/x/text v0.3.6 // indirect + go.uber.org/zap v1.19.1 + golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d // indirect + golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 // indirect + golang.org/x/sys v0.0.0-20211210111614-af8b64212486 // indirect + golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 + golang.org/x/text v0.3.7 // indirect golang.org/x/time v0.0.0-20191024005414-555d28b269f0 // indirect - golang.org/x/tools v0.1.2 // indirect - 
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect - google.golang.org/api v0.44.0 // indirect + google.golang.org/api v0.63.0 // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c // indirect - google.golang.org/grpc v1.38.0 // indirect - google.golang.org/protobuf v1.26.0 // indirect + google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa // indirect + google.golang.org/grpc v1.43.0 // indirect + google.golang.org/protobuf v1.27.1 // indirect gopkg.in/go-playground/assert.v1 v1.2.1 // indirect gopkg.in/go-playground/validator.v9 v9.31.0 - gopkg.in/ini.v1 v1.62.0 // indirect + gopkg.in/ini.v1 v1.66.2 // indirect gopkg.in/yaml.v2 v2.4.0 gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect gotest.tools v2.2.0+incompatible // indirect ) + +require ( + github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d // indirect + github.com/google/go-github/v39 v39.1.0 // indirect + golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 // indirect + golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect +) diff --git a/go.sum b/go.sum index 996b50bba4..fe897e6741 100644 --- a/go.sum +++ b/go.sum @@ -16,8 +16,16 @@ cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKP cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= -cloud.google.com/go v0.81.0 h1:at8Tk2zUz63cLPR0JPWm5vp77pEZmzxEQBEfRKn1VV8= cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= +cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= +cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= +cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= +cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= +cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= +cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= +cloud.google.com/go v0.99.0 h1:y/cM2iqGgGi5D5DQZl6D9STN/3dR/Vx5Mp8s752oJTY= +cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -26,7 +34,7 @@ cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4g cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= +cloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod 
h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -40,6 +48,7 @@ cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9 dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= github.com/Laisky/graphql v1.0.5 h1:8eJ7mrXKVkKxZ+Nw1HPs3iQPVNxXGctysqTEY0lNBlc= github.com/Laisky/graphql v1.0.5/go.mod h1:ITUrUa/tkyD3MezVt4FKGGIGZokhG13kP8sImV86I1o= github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= @@ -52,6 +61,10 @@ github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= github.com/apparentlymart/go-textseg v1.0.0 h1:rRmlIsPEEhUTIKQb7T++Nz/A5Q6C9IuX2wFoYVvnCs0= @@ -60,7 +73,9 @@ github.com/apparentlymart/go-textseg/v12 v12.0.0 h1:bNEQyAGak9tojivJNkoqWErVCQbj github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d h1:Byv0BzEl3/e6D5CLfI0j/7hiIEtvGVFPCZ7Ei2oq8iQ= github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= github.com/aws/aws-sdk-go v1.15.78/go.mod h1:E3/ieXAlvM0XWO57iftYVDLLvQ824smPP3ATZkfNZeM= @@ -70,20 +85,31 @@ github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuP github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= 
+github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d h1:xDfNPAt8lFiC1UJrqV3uuy861HCTo708pDMbjHHdCas= github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ00z/TKoufEY6K/a0k6AhaJrQKdFe6OfVXsa4= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= -github.com/bradleyfalzon/ghinstallation v1.1.1 h1:pmBXkxgM1WeF8QYvDLT5kuQiHMcmf+X015GI0KM/E3I= -github.com/bradleyfalzon/ghinstallation v1.1.1/go.mod h1:vyCmHTciHx/uuyN82Zc3rXN3X2KTK8nUTCrTMwAhcug= +github.com/bradleyfalzon/ghinstallation/v2 v2.0.3 h1:ywF/8q+GVpvlsEuvRb1SGSDQDUxntW1d4kFu/9q/YAE= +github.com/bradleyfalzon/ghinstallation/v2 v2.0.3/go.mod h1:tlgi+JWCXnKFx/Y4WtnDbZEINo31N5bcvnCoqieefmk= github.com/briandowns/spinner v0.0.0-20170614154858-48dbb65d7bd5 h1:osZyZB7J4kE1tKLeaUjV6+uZVBfS835T0I/RxmwWw1w= github.com/briandowns/spinner v0.0.0-20170614154858-48dbb65d7bd5/go.mod h1:hw/JEQBIE+c/BLI4aKM8UU8v+ZqrD3h7HC27kKt8JQU= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cheggaaa/pb v1.0.27/go.mod h1:pQciLPpbU0oxA0h+VJYYLxO+XeDQb5pZijXscXHm81s= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= +github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d h1:U+s90UTSYgptZMwQh2aRr3LuazLJIa+Pg3Kc1ylSYVY= @@ -92,26 +118,32 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 
-github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM= -github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/docker/docker v0.0.0-20180620051407-e2593239d949 h1:La/qO5ApRpiO4c0wGWFs4YB/HdobJHArySoQZfXtaUQ= github.com/docker/docker v0.0.0-20180620051407-e2593239d949/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/elazarl/go-bindata-assetfs v1.0.1 h1:m0kkaHRKEu7tUIUFVwhGGGYClXvyl4RE03qmvRTNfbw= github.com/elazarl/go-bindata-assetfs v1.0.1/go.mod h1:v+YaWX3bdea5J/mo8dSETolEo7R71Vk1u8bnjau5yw4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.10.1/go.mod h1:AY7fTTXNdv/aJ2O5jwpxAPOWUZ7hQAEvzN5Pf27BkQQ= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= +github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E9/baC+qXE/TeeyBRzgJDws= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= +github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/flynn-archive/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BMXYYRWTLOJKlh+lOBt6nUQgXAfB7oVIQt5cNreqSLI= github.com/flynn-archive/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:rZfgFAXFS/z/lEd6LJmf9HVZ1LkgYiHx5pHhV5DR16M= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fsnotify/fsnotify v1.4.10-0.20200417215612-7f4cf4dd2b52 h1:0NmERxogGTU8hgzOhRKNoKivtBZkDW29GeuJtK9e0sc= -github.com/fsnotify/fsnotify v1.4.10-0.20200417215612-7f4cf4dd2b52/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI= +github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-ozzo/ozzo-validation v0.0.0-20170913164239-85dcd8368eba h1:P0TvLfAFQ/hc8Q+VBsrgzGv52DxTjAu199VHbAI4LLQ= github.com/go-ozzo/ozzo-validation v0.0.0-20170913164239-85dcd8368eba/go.mod h1:gsEKFIVnabGBt6mXmxK0MoFy+cZoTJY6mu5Ll3LVLBU= github.com/go-playground/locales v0.12.1 h1:2FITxuFt/xuCNP1Acdhv62OzaCiviiE4kotfhkmOqEc= @@ -119,16 +151,22 @@ github.com/go-playground/locales v0.12.1/go.mod h1:IUMDtCfWo/w/mtMfIE/IG2K+Ey3yg 
github.com/go-playground/universal-translator v0.16.0 h1:X++omBR/4cE2MNg91AoC3rmGrCjJ8eAeUP/K/EKx4DM= github.com/go-playground/universal-translator v0.16.0/go.mod h1:1AnU7NaIRDWWzGEKwgtJRd2xk99HeFyHw3yid4rvQIY= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/go-test/deep v1.0.7 h1:/VSMRlnY/JSyqxQUzQLKVMAskpY/NZKFA5j2P+0pP2M= -github.com/go-test/deep v1.0.7/go.mod h1:QV8Hv/iy04NyLBxAdO9njL0iVPN1S4d/A3NVv1V36o8= +github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM= +github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= +github.com/golang-jwt/jwt/v4 v4.2.0 h1:besgBTC8w8HjP6NzQdxwKH9Z5oQMZ24ThTrHp3cZ8eU= +github.com/golang-jwt/jwt/v4 v4.2.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= @@ -137,6 +175,7 @@ github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -156,6 +195,7 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.3/go.mod 
h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -168,20 +208,24 @@ github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-github/v29 v29.0.2 h1:opYN6Wc7DOz7Ku3Oh4l7prmkOMwEcQxpFtxdU8N8Pts= -github.com/google/go-github/v29 v29.0.2/go.mod h1:CHKiKKPHJ0REzfwc14QMklvtHwCveD0PxlMjLlzAM5E= +github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-github/v31 v31.0.0 h1:JJUxlP9lFK+ziXKimTCprajMApV1ecWD4NB6CCb0plo= github.com/google/go-github/v31 v31.0.0/go.mod h1:NQPZol8/1sMoWYGN2yaALIBytu17gAWfhbweiEed3pM= -github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk= +github.com/google/go-github/v39 v39.0.0/go.mod h1:C1s8C5aCC9L+JXIYpJM5GYytdX52vC1bLvHEF1IhBrE= +github.com/google/go-github/v39 v39.1.0 h1:1vf4gM0D1e+Df2HMxaYC3+o9+Huj3ywGTtWc3VVYaDA= +github.com/google/go-github/v39 v39.1.0/go.mod h1:C1s8C5aCC9L+JXIYpJM5GYytdX52vC1bLvHEF1IhBrE= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= +github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= +github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.1.0 h1:wCKgOCHuUEVfsaQLpPSJb7VdYCdTVZQAuOdYm1yc/60= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.2.1 h1:d8MncMlErDFTwQGBK1xhv026j9kqhvw1Qv9IbWT1VLQ= +github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= @@ -193,15 +237,18 @@ github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod 
h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2-0.20200519141726-cb32006e483f h1:qa1wFcvZzVLbFVPdsdTsWL6k5IP6BEmFmd9SeahRQ5s= github.com/google/uuid v1.1.2-0.20200519141726-cb32006e483f/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= +github.com/googleapis/gax-go/v2 v2.1.1 h1:dp3bWCh+PPO1zjRRiCSczJav13sBvG4UhNyVTa1KqdU= +github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= @@ -210,22 +257,28 @@ github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0U github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/graph-gophers/graphql-go v0.0.0-20200309224638-dae41bde9ef9/go.mod h1:9CQHMSxwO4MprSdzoIEobiHpoLtHm77vfxsvsIN5Vuc= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= -github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/consul/api v1.12.0/go.mod h1:6pVBMo0ebnYdt2S3H87XhekM/HHrUoTD2XXb/VrZVy0= +github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= -github.com/hashicorp/go-getter v1.5.7 h1:HBLsom8eGHLxj78ta+/MVSyct8KWG4B4z6lhBA4vJcg= -github.com/hashicorp/go-getter v1.5.7/go.mod h1:BrrV/1clo8cCYu6mxvboYg+KutTiFnXjMEgDD8+i7ZI= -github.com/hashicorp/go-hclog v0.9.2 h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxCsHI= +github.com/hashicorp/go-getter v1.5.10 h1:EN9YigTlv5Ola0IuleFzQGuaYPPHHtWusP/5AypWEMs= +github.com/hashicorp/go-getter v1.5.10/go.mod h1:9i48BP6wpWweI/0/+FBjqLrp9S8XtwUGjiu0QkWHEaY= 
github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= +github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-hclog v1.0.0 h1:bkKf0BeBXcSYa7f5Fyi9gMuQ8gNsxeiNpZjR6VxNZeo= +github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= +github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= github.com/hashicorp/go-retryablehttp v0.6.8 h1:92lWxgpa+fF3FozM4B3UZtHZMJX8T5XT+TFdCxsPyWs= github.com/hashicorp/go-retryablehttp v0.6.8/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= -github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= github.com/hashicorp/go-safetemp v1.0.0 h1:2HR189eFNrjHQyENnQMMpCiBAsRxzbTMIgBhEyExpmo= github.com/hashicorp/go-safetemp v1.0.0/go.mod h1:oaerMy3BhqiTbVye6QuFhFtIceqFoDHxNAB65b+Rj1I= github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= @@ -235,9 +288,9 @@ github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/b github.com/hashicorp/go-version v1.1.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/go-version v1.3.0 h1:McDWVJIU/y+u1BRV06dPaLfLCaT7fUTJLp5r04x7iNw= github.com/hashicorp/go-version v1.3.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/hashicorp/hcl v0.0.0-20170504190234-a4b07c25de5f/go.mod h1:oZtUIOe8dh44I2q6ScRibXws4Ajl+d+nod3AaR9vL5w= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= @@ -245,14 +298,15 @@ github.com/hashicorp/hcl/v2 v2.0.0/go.mod h1:oVVDG71tEinNGYCxinCYadcmKU9bglqW9pV github.com/hashicorp/hcl/v2 v2.6.0 h1:3krZOfGY6SziUXa6H9PJU6TyohHn7I+ARYnhbeNBz+o= github.com/hashicorp/hcl/v2 v2.6.0/go.mod h1:bQTN5mpo+jewjJgh8jr0JUguIi7qPHUF6yIfAEN3jqY= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= -github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= -github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= +github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc= +github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= +github.com/hashicorp/serf v0.9.6/go.mod 
h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= github.com/hashicorp/terraform-config-inspect v0.0.0-20200806211835-c481b8bfa41e h1:wIsEsIITggCC4FTO9PisDjy561UU7OPL6uTu7tnkHH8= github.com/hashicorp/terraform-config-inspect v0.0.0-20200806211835-c481b8bfa41e/go.mod h1:Z0Nnk4+3Cy89smEbrq+sl1bxc9198gIP4I7wcQF6Kqs= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/huandu/xstrings v1.3.1 h1:4jgBlKK6tLKFvO8u5pmYjG91cqytmDCDvGh7ECVFfFs= github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imdario/mergo v0.3.11 h1:3tnifQM4i+fbajXKBHXWEH+KvNHqojZ778UH75j3bGA= @@ -263,19 +317,24 @@ github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik= github.com/jmespath/go-jmespath v0.3.1-0.20200310193758-2437e8417af5 h1:1G6l+WClVmbflmgW0Wsr6a50KeKCQcYKv/vUjtQUHuw= github.com/jmespath/go-jmespath v0.3.1-0.20200310193758-2437e8417af5/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= -github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.11.2 h1:MiK62aErc3gIiVEtyzKfeOHgW7atJb5g/KNX5m3c2nQ= github.com/klauspost/compress v1.11.2/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= 
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= @@ -286,20 +345,32 @@ github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/lusis/slack-test v0.0.0-20190426140909-c40012f20018 h1:MNApn+Z+fIT4NPZopPfCc1obT6aY3SVM6DOctz1A9ZU= github.com/lusis/slack-test v0.0.0-20190426140909-c40012f20018/go.mod h1:sFlOUpQL1YcjhFVXhg1CG8ZASEs/Mf1oVb6H75JL/zg= +github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w= github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= -github.com/mattn/go-colorable v0.0.9 h1:UVL0vNpWh04HeJXV0KLcaT7r06gOH2l4OW6ddYRUIY4= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.4 h1:bnP0vzxcAdeI1zdubAl5PjU6zsERjGZb7raWodagDYs= github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= +github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= -github.com/mcdafydd/go-azuredevops v0.12.0 h1:CmG9uheFF6M3WnSykVNVLxR7zXrtg4p3pE2/lNDnPEE= -github.com/mcdafydd/go-azuredevops v0.12.0/go.mod h1:B4UDyn7WEj1/97f45j3VnzEfkWKe05+/dCcAPdOET4A= -github.com/microcosm-cc/bluemonday v1.0.15 h1:J4uN+qPng9rvkBZBoBb8YGR+ijuklIMpSOZZLjYpbeY= -github.com/microcosm-cc/bluemonday v1.0.15/go.mod h1:ZLvAzeakRwrGnzQEvstVzVt3ZpqOF2+sdFr0Om+ce30= -github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= -github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/mcdafydd/go-azuredevops v0.12.1 h1:WxwLVyGuJ8oL7uWQp1/J6GefX1wMQQZUHWRGsrm+uE8= +github.com/mcdafydd/go-azuredevops v0.12.1/go.mod h1:B4UDyn7WEj1/97f45j3VnzEfkWKe05+/dCcAPdOET4A= +github.com/microcosm-cc/bluemonday v1.0.16 h1:kHmAq2t7WPWLjiGvzKa5o3HzSfahUKiOq7fAPUiMNIc= +github.com/microcosm-cc/bluemonday v1.0.16/go.mod h1:Z0r70sCuXHig8YpBzCc5eGHAap2K7e/u082ZUpDRRqM= +github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= +github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= +github.com/mitchellh/cli 
v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= github.com/mitchellh/colorstring v0.0.0-20150917214807-8631ce90f286 h1:KHyL+3mQOF9sPfs26lsefckcFNDcIZtiACQiECzIUkw= github.com/mitchellh/colorstring v0.0.0-20150917214807-8631ce90f286/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw= github.com/mitchellh/copystructure v1.0.0 h1:Laisrj+bAB6b/yJwB5Bt3ITZhGJdqmxquMKeZ+mmkFQ= @@ -312,19 +383,20 @@ github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eI github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= github.com/mitchellh/go-wordwrap v1.0.0 h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9Gns0u4= github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= -github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= -github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= -github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs= +github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/reflectwalk v1.0.0 h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY= github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mohae/deepcopy v0.0.0-20170603005431-491d3605edfb h1:e+l77LJOEqXTIQihQJVkA6ZxPOUmfPM5e4H7rcpgtSk= github.com/mohae/deepcopy v0.0.0-20170603005431-491d3605edfb/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/nlopes/slack v0.4.0 h1:OVnHm7lv5gGT5gkcHsZAyw++oHVFihbjWbL3UceUpiA= github.com/nlopes/slack v0.4.0/go.mod h1:jVI4BBK3lSktibKahxBF74txcK2vyvkza1z/+rRnVAM= github.com/nxadm/tail v1.4.4 h1:DQuhQpB1tVlglWS2hLQ5OV6B5r8aGxSrPc5Qo6uTN78= @@ -338,10 +410,12 @@ github.com/onsi/gomega v1.10.1 h1:o0+MgICZLuZ7xjH7Vx6zS/zcu93/BEp1VwkIW1mEXCE= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pelletier/go-toml v1.9.3 h1:zeC5b1GviRUyKYd6OJPvBU/mcVDVoL1OhT17FCt5dSQ= -github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pascaldekloe/goe v0.1.0/go.mod 
h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pelletier/go-toml v1.9.4 h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM= +github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/petergtz/pegomock v2.9.0+incompatible h1:BKfb5XfkJfehe5T+O1xD4Zm26Sb9dnRj7tHxLYwUPiI= github.com/petergtz/pegomock v2.9.0+incompatible/go.mod h1:nuBLWZpVyv/fLo56qTwt/AUau7jgouO1h7bEvZCq82o= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -349,7 +423,19 @@ github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= github.com/remeh/sizedwaitgroup v1.0.0 h1:VNGGFwNo/R5+MJBf6yrsr110p0m4/OX4S3DCy7Kyl5E= github.com/remeh/sizedwaitgroup v1.0.0/go.mod h1:3j2R4OIe/SeS6YDhICBy22RWjJC5eNCJ1V+9+NVNYlo= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= @@ -357,6 +443,7 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/sagikazarmark/crypt v0.4.0/go.mod h1:ALv2SRj7GxYV4HO9elxH9nS6M9gW+xDNxqmyJ6RfDFM= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= @@ -368,16 +455,16 @@ 
github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f h1:tygelZueB1EtXk github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f/go.mod h1:AuYgA5Kyo4c7HfUmvRGs/6rGlMMV/6B1bVnB9JxJEEg= github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.6.1-0.20200528085638-6699a89a232f h1:qqqIhBDFUBrbMezIyJkKWIpf+E5CdObleGMjW1s19Hg= github.com/sirupsen/logrus v1.6.1-0.20200528085638-6699a89a232f/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= -github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= -github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= -github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cast v1.4.1 h1:s0hze+J0196ZfEMTs80N7UlFt0BDuQ7Q+JDnHiMWKdA= +github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cobra v0.0.0-20170905172051-b78744579491 h1:XOya2OGpG7Q4gS4MYHRoFSTlBGnZD40X+Kw2ikFQFXE= github.com/spf13/cobra v0.0.0-20170905172051-b78744579491/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= @@ -386,9 +473,10 @@ github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnIn github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.8.1 h1:Kq1fyeebqsBfbjZj4EL7gj2IO0mMaiyjYUWcUsl2O44= -github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= +github.com/spf13/viper v1.10.1 h1:nuJZuYpG7gTj/XqiUwg8bA0cp1+M2mC3J4g5luUYBKk= +github.com/spf13/viper v1.10.1/go.mod h1:IGlFPqhNAPKRxohIzWpI5QEy4kuI7tcl5WvR+8qy1rU= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -398,15 +486,16 @@ github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5Cc github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= 
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= github.com/ulikunitz/xz v0.5.8 h1:ERv8V6GKqVi23rgu5cj9pVfVzJbOqAY2Ntl88O6c2nQ= github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/urfave/cli v1.22.5 h1:lNq9sAHXK2qfdI8W+GRItjCEkI+2oR4d+MEHy1CKXoU= github.com/urfave/cli v1.22.5/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= -github.com/urfave/negroni v0.3.0 h1:PaXOb61mWeZJxc1Ji2xJjpVg9QfPo0rrB+lHyBxGNSU= -github.com/urfave/negroni v0.3.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= +github.com/urfave/negroni v1.0.0 h1:kIimOitoypq34K7TG7DUaJ9kq/N4Ofuwi1sjz0KipXc= +github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= -github.com/xanzy/go-gitlab v0.50.3 h1:M7ncgNhCN4jaFNyXxarJhCLa9Qi6fdmCxFFhMTQPZiY= -github.com/xanzy/go-gitlab v0.50.3/go.mod h1:Q+hQhV508bDPoBijv7YjK/Lvlb4PhVhJdKqXVQrUoAE= +github.com/xanzy/go-gitlab v0.52.2 h1:gkgg1z4ON70sphibtD86Bfmt1qV3mZ0pU0CBBCFAEvQ= +github.com/xanzy/go-gitlab v0.52.2/go.mod h1:Q+hQhV508bDPoBijv7YjK/Lvlb4PhVhJdKqXVQrUoAE= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -418,9 +507,9 @@ github.com/zclconf/go-cty v1.5.1 h1:oALUZX+aJeEBUe2a1+uD2+UTaYfEjnKFDEMRydkGvWE= github.com/zclconf/go-cty v1.5.1/go.mod h1:nHzOclRkoj++EU9ZjSrZvRG0BXIWt8c7loYc0qXAFGQ= go.etcd.io/bbolt v1.3.6 h1:/ecaJf0sk1l4l6V4awd65v2C3ILy7MSj+s/x1ADCIMU= go.etcd.io/bbolt v1.3.6/go.mod h1:qXsaaIqmgQH0T+OPdb99Bf+PKfBBQVAdyD6TY9G8XM4= -go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= -go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= -go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= +go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -429,26 +518,30 @@ go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/goleak v1.1.10 h1:z+mqJhf6ss6BSfSM671tgKyZBFPTTJM+HLxnhPC3wu0= -go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= +go.uber.org/goleak v1.1.11-0.20210813005559-691160354723 
h1:sHOAIxRGBp443oHZIPB+HsUGaksVCXVQENPxwTfQdH4= +go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= go.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= -go.uber.org/zap v1.19.0 h1:mZQZefskPPCMIBCSEH0v2/iUqqLrYtaeqwD6FUGUnFE= -go.uber.org/zap v1.19.0/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= -golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +go.uber.org/zap v1.19.1 h1:ue41HOKd1vGURxrmeKIgELGb3jPW9DMUDGtsinblHwI= +go.uber.org/zap v1.19.1/go.mod h1:j3DNczoxDZroyBnOT1L/Q79cfUMGZxlv/9dzN7SM1rI= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200403201458-baeed622b8d8/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 h1:psW17arqaxU48Z5kZ0CQnkZWQJsqcURM6tKiBApRjXI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 h1:7I4JAnoQBe7ZtJcBaYHi5UtiO8tQHbUSXxL+pnGRANg= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -471,7 +564,6 @@ golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRu golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 h1:VLliZ0d+/avPrXXH+OakdXhpJuEoBZuwh1m2j7U6Iug= golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile 
v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= @@ -483,13 +575,12 @@ golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= @@ -497,9 +588,11 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -523,8 +616,11 @@ golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210614182718-04defd469f4e h1:XpT3nA5TvE525Ne3hInMh6+GETgn27Zfm9dxsThnX2Q= +golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= 
+golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d h1:LO7XpTYMwTqxjLcGWPijK3vRXg1aWdlNOVOHRq45d7c= +golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -538,8 +634,13 @@ golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602 h1:0Ja1LBD+yisY6RWM/BH7TJVXWsSjs2VwBSmvSX4HdBc= -golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 h1:RerP+noqYHUQ8CMRcPlC2nvTa4dcBIjegkuWdcUDuqg= +golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -552,11 +653,14 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502175342-a43fa875dd82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -564,14 +668,19 @@ golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -592,14 +701,29 @@ golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210510120138-977fb7262007 h1:gG67DSER+11cZvqIMb8S8bt0vZtiN6xWYARwirrOSfE= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211210111614-af8b64212486 h1:5hpz5aRr+W1erYCL5JRhSUBJRph7l9XkNveoExlrKYk= +golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -608,8 +732,9 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0 h1:/5xXl8Y5W96D+TtHSlonuFqGHIWVuyCkGJLwGh9JJFs= 
@@ -619,7 +744,6 @@ golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3 golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= @@ -627,10 +751,9 @@ golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -664,8 +787,11 @@ golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= -golang.org/x/tools v0.1.2 h1:kRBLX7v7Af8W7Gdbbc908OJcdgtK8bOz9Uaj8/F1ACA= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -692,8 +818,18 @@ google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34q google.golang.org/api v0.40.0/go.mod 
h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= -google.golang.org/api v0.44.0 h1:URs6qR1lAxDsqWITsQXI4ZkGiYJ5dHtRNiCpfs2OeKA= -google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= +google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= +google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= +google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= +google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= +google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= +google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= +google.golang.org/api v0.59.0/go.mod h1:sT2boj7M9YJxZzgeZqXogmhfmRWDtPzT31xkieUbuZU= +google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= +google.golang.org/api v0.63.0 h1:n2bqqK895ygnBpdPDYetfy23K7fJ22wsrZKCyfuRkkA= +google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -742,10 +878,31 @@ google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= -google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c h1:wtujag7C+4D6KMoulW9YauvK2lgdvCMS260jsqqBXr0= +google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= +google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= +google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= 
+google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211008145708-270636b82663/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211028162531-8db9c33dc351/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa h1:I0YcKz0I7OAhddo7ya8kMnvprhcWM045PmkBdMO9zN0= +google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/grpc v1.29.1 h1:EC2SB8S04d2r73uptxphDSUG+kTKVgjRPF+N3xpxRB4= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -757,11 +914,15 @@ google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpAD google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/cheggaaa/pb.v1 v1.0.27/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= @@ -769,13 +930,15 @@ 
gopkg.in/go-playground/assert.v1 v1.2.1 h1:xoYuJVE7KT85PYWrN730RguIQO0ePzVRfFMXa gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE= gopkg.in/go-playground/validator.v9 v9.31.0 h1:bmXmP2RSNtFES+bn4uYuHT7iJFJv7Vj+an+ZQdDaD1M= gopkg.in/go-playground/validator.v9 v9.31.0/go.mod h1:+c9/zcJMFNgbLvly1L1V+PpxWdVbfP1avr/N00E2vyQ= -gopkg.in/ini.v1 v1.62.0 h1:duBzk771uxoUuOlyRLkHsygud9+5lrlGjdFBb4mSKDU= -gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.66.2 h1:XfR1dOYubytKy4Shzc2LHrrGhU0lDCfDGG1yLPmpgsI= +gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= diff --git a/kustomize/bundle.yaml b/kustomize/bundle.yaml index 3c2f4e56e3..8795a8c21c 100644 --- a/kustomize/bundle.yaml +++ b/kustomize/bundle.yaml @@ -22,7 +22,7 @@ spec: fsGroup: 1000 # Atlantis group (1000) read/write access to volumes. containers: - name: atlantis - image: ghcr.io/runatlantis/atlantis:v0.17.3 + image: ghcr.io/runatlantis/atlantis:v0.18.0 env: - name: ATLANTIS_DATA_DIR value: /atlantis diff --git a/main.go b/main.go index bbde2aa491..45e17e53db 100644 --- a/main.go +++ b/main.go @@ -22,7 +22,7 @@ import ( "github.com/spf13/viper" ) -const atlantisVersion = "0.17.3" +const atlantisVersion = "0.18.0" func main() { v := viper.New() diff --git a/runatlantis.io/docs/apply-requirements.md b/runatlantis.io/docs/apply-requirements.md index 63ec3fe736..7e03df6b0e 100644 --- a/runatlantis.io/docs/apply-requirements.md +++ b/runatlantis.io/docs/apply-requirements.md @@ -87,6 +87,13 @@ You can set the `mergeable` requirement by: #### Meaning Each VCS provider has a different concept of "mergeability": + +::: warning +Some VCS providers have a feature for branch protection to control "mergeability". If you want to use it, +you probably need to limit the base branch not to bypass the branch protection. +See also the `branch` keyword in [Server Side Repo Config](server-side-repo-config.html#reference) for more details. +::: + #### GitHub In GitHub, if you're not using [Protected Branches](https://help.github.com/articles/about-protected-branches/) then all pull requests are mergeable unless there is a conflict. @@ -161,7 +168,7 @@ You can set the `undiverged` requirement by: - dir: . apply_requirements: [undiverged] ``` -### Meaning +#### Meaning The `merge` checkout strategy creates a temporary merge commit and runs the `plan` on the Atlantis local version of the PR source and destination branch. The local destination branch can become out of date since changes to the destination branch are not fetched if there are no changes to the source branch. 
`undiverged` enforces that Atlantis local version of master is up to date diff --git a/runatlantis.io/docs/automerging.md b/runatlantis.io/docs/automerging.md index 440e8b9b04..b0f4cdfe61 100644 --- a/runatlantis.io/docs/automerging.md +++ b/runatlantis.io/docs/automerging.md @@ -21,6 +21,10 @@ Automerging can be enabled either by: to be configured under the `projects` key. ::: +## How to Disable +If automerge is enabled, you can disable it for a single `atlantis apply` +command with the `--auto-merge-disabled` option. + ## All Plans Must Succeed When automerge is enabled, **all plans** in a pull request **must succeed** before **any** plans can be applied. diff --git a/runatlantis.io/docs/configuring-webhooks.md b/runatlantis.io/docs/configuring-webhooks.md index ac98bcc8d2..c1686a3e79 100644 --- a/runatlantis.io/docs/configuring-webhooks.md +++ b/runatlantis.io/docs/configuring-webhooks.md @@ -11,7 +11,7 @@ See the instructions for your specific provider below. [[toc]] ## GitHub/GitHub Enterprise -You can install your webhook at the [organization](https://help.github.com/articles/differences-between-user-and-organization-accounts/) level, or for each individual repository. +You can install your webhook at the [organization](https://docs.github.com/en/get-started/learning-about-github/types-of-github-accounts) level, or for each individual repository. ::: tip NOTE If only some of the repos in your organization are to be managed by Atlantis, then you @@ -119,6 +119,6 @@ Repeat the process above until you have webhook subscriptions for the following ## Next Steps * To verify that Atlantis is receiving your webhooks, create a test pull request - to your repo. + to your repo. * You should see the request show up in the Atlantis logs at an `INFO` level. * You'll now need to configure Atlantis to add your [Provider Credentials](provider-credentials.md) diff --git a/runatlantis.io/docs/custom-workflows.md b/runatlantis.io/docs/custom-workflows.md index a8e34bcb5a..2253e221e2 100644 --- a/runatlantis.io/docs/custom-workflows.md +++ b/runatlantis.io/docs/custom-workflows.md @@ -128,19 +128,19 @@ workflows: myworkflow: plan: steps: - - run: terraform init -input=false -no-color + - run: terraform init -input=false # If you're using workspaces you need to select the workspace using the # $WORKSPACE environment variable. - - run: terraform workspace select -no-color $WORKSPACE + - run: terraform workspace select $WORKSPACE # You MUST output the plan using -out $PLANFILE because Atlantis expects # plans to be in a specific location. - - run: terraform plan -input=false -refresh -no-color -out $PLANFILE + - run: terraform plan -input=false -refresh -out $PLANFILE apply: steps: # Again, you must use the $PLANFILE environment variable. 
- - run: terraform apply -no-color $PLANFILE + - run: terraform apply $PLANFILE ``` ### Terragrunt @@ -176,14 +176,14 @@ workflows: - env: name: TERRAGRUNT_TFPATH command: 'echo "terraform${ATLANTIS_TERRAFORM_VERSION}"' - - run: terragrunt plan -no-color -out=$PLANFILE - - run: terragrunt show -no-color -json $PLANFILE > $SHOWFILE + - run: terragrunt plan -out=$PLANFILE + - run: terragrunt show -json $PLANFILE > $SHOWFILE apply: steps: - env: name: TERRAGRUNT_TFPATH command: 'echo "terraform${ATLANTIS_TERRAFORM_VERSION}"' - - run: terragrunt apply -no-color $PLANFILE + - run: terragrunt apply $PLANFILE ``` If using the repo's `atlantis.yaml` file you would use the following config: @@ -201,13 +201,13 @@ workflows: - env: name: TERRAGRUNT_TFPATH command: 'echo "terraform${ATLANTIS_TERRAFORM_VERSION}"' - - run: terragrunt plan -no-color -out $PLANFILE + - run: terragrunt plan -out $PLANFILE apply: steps: - env: name: TERRAGRUNT_TFPATH command: 'echo "terraform${ATLANTIS_TERRAFORM_VERSION}"' - - run: terragrunt apply -no-color $PLANFILE + - run: terragrunt apply $PLANFILE ``` **NOTE:** If using the repo's `atlantis.yaml` file, you will need to specify each directory that is a Terragrunt project. diff --git a/runatlantis.io/docs/deployment.md b/runatlantis.io/docs/deployment.md index 45685cb075..542afad983 100644 --- a/runatlantis.io/docs/deployment.md +++ b/runatlantis.io/docs/deployment.md @@ -73,7 +73,7 @@ To install: ```sh helm install atlantis runatlantis/atlantis -f values.yaml ``` - + If you are using helm v2, run: ```sh helm install -f values.yaml runatlantis/atlantis @@ -524,7 +524,7 @@ OpenShift runs Docker images with random user id's that use `/` as their home di ### AWS Fargate If you'd like to run Atlantis on [AWS Fargate](https://aws.amazon.com/fargate/) - check out the Atlantis module on the [Terraform Module Registry](https://tf-registry.herokuapp.com/modules/terraform-aws-modules/atlantis/aws/latest) + check out the Atlantis module on the [Terraform Module Registry](https://registry.terraform.io/modules/terraform-aws-modules/atlantis/aws/latest) and then check out the [Next Steps](#next-steps). ### Google Kubernetes Engine (GKE) diff --git a/runatlantis.io/docs/policy-checking.md b/runatlantis.io/docs/policy-checking.md index 5c22328216..ee95a32aba 100644 --- a/runatlantis.io/docs/policy-checking.md +++ b/runatlantis.io/docs/policy-checking.md @@ -49,9 +49,36 @@ policies: source: local ``` -`name` - A name of your policy set. -`path` - Path to a policies directory. -`source` - Tells atlantis where to fetch the policies from. Currently you can only host policies locally by using `local`. +- `name` - A name of your policy set. +- `path` - Path to a policies directory. *Note: replace `` with absolute dir path to conftest policy/policies.* +- `source` - Tells atlantis where to fetch the policies from. Currently you can only host policies locally by using `local`. + +By default conftest is configured to only run the `main` package. If you wish to run specific/multiple policies consider passing `--namespace` or `--all-namespaces` to conftest with [`extra_args`](https://www.runatlantis.io/docs/custom-workflows.html#adding-extra-arguments-to-terraform-commands) via a custom workflow as shown in the below example. + +Example Server Side Repo configuration using `--all-namespaces` and a local src dir. 
+ +``` +repos: + - id: github.com/myorg/example-repo +policies: + owners: + users: + - example-dev + policy_sets: + - name: example-conf-tests + path: /home/atlantis/conftest_policies # Consider a separate VCS repo, mounted into the container + source: local +workflows: + custom: + plan: + steps: + - init + - plan + policy_check: + steps: + - policy_check: + extra_args: ["-p /home/atlantis/conftest_policies/", "--all-namespaces"] +``` ### Step 3: Write the policy @@ -91,8 +118,4 @@ deny[msg] { ``` -::: tip Notes -By default conftest is configured to only run the `main` package. If you want to change this behavior [`extra_args`](https://www.runatlantis.io/docs/custom-workflows.html#adding-extra-arguments-to-terraform-commands) can be used to pass in flags to conftest such as `--namespace` or `--all-namespaces` -::: - That's it! Now your Atlantis instance is configured to run policies on your Terraform plans 🎉
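As a hedged illustration of the policy flow configured above (assuming the `conftest` and `terraform` CLIs are available locally, and reusing the `/home/atlantis/conftest_policies` path from the example config; adjust it to wherever your policies actually live), a policy directory can be exercised against a plan before Atlantis ever runs it:

```bash
# Export a plan in the JSON form that conftest (and the Atlantis policy_check step) evaluates.
terraform init -input=false
terraform plan -input=false -out=plan.tfplan
terraform show -json plan.tfplan > plan.json

# Evaluate every policy namespace, mirroring the --all-namespaces extra_args shown above.
conftest test --policy /home/atlantis/conftest_policies --all-namespaces plan.json
```

A failure reported here should correspond to a failing `policy_check` step once the same policies are wired into Atlantis.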
diff --git a/runatlantis.io/docs/repo-level-atlantis-yaml.md b/runatlantis.io/docs/repo-level-atlantis-yaml.md index 4b41068e82..7546f432a6 100644 --- a/runatlantis.io/docs/repo-level-atlantis-yaml.md +++ b/runatlantis.io/docs/repo-level-atlantis-yaml.md @@ -71,6 +71,9 @@ workflows: steps: - run: echo hi - apply +allowed_regexp_prefixes: +- dev/ +- staging/ ``` ## Use Cases @@ -194,6 +197,7 @@ automerge: delete_source_branch_on_merge: projects: workflows: +allowed_regexp_prefixes: ``` | Key | Type | Default | Required | Description | |-------------------------------|----------------------------------------------------------|---------|----------|-------------------------------------------------------------| | automerge | bool | `false` | no | Automatically merges pull request when all plans are applied | | delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge | | projects | array[[Project](repo-level-atlantis-yaml.html#project)] | `[]` | no | Lists the projects in this repo | | workflows *(restricted)* | map[string: [Workflow](custom-workflows.html#reference)] | `{}` | no | Custom workflows | +| allowed_regexp_prefixes | array[string] | `[]` | no | Lists the allowed regexp prefixes to use when the [`--enable-regexp-cmd`](server-configuration.html#enable-regexp-cmd) flag is used | ### Project ```yaml diff --git a/runatlantis.io/docs/security.md b/runatlantis.io/docs/security.md index 4793190afe..a1cfa57812 100644 --- a/runatlantis.io/docs/security.md +++ b/runatlantis.io/docs/security.md @@ -34,7 +34,7 @@ To prevent this, allowlist [Bitbucket's IP addresses](https://confluence.atlassi ## Mitigations ### Don't Use On Public Repos -Because anyone can comment on public pull requests, even with all the security mitigations available, it's still dangerous to run Atlantis on public repos until Atlantis gets an authentication system. +Because anyone can comment on public pull requests, even with all the security mitigations available, it's still dangerous to run Atlantis on public repos without proper configuration of the security settings. ### Don't Use `--allow-fork-prs` If you're running on a public repo (which isn't recommended, see above) you shouldn't set `--allow-fork-prs` (defaults to false) @@ -79,3 +79,12 @@ Azure DevOps supports sending a basic authentication header in all webhook event If you're using webhook secrets but your traffic is over HTTP then the webhook secrets could be stolen. Enable SSL/HTTPS using the `--ssl-cert-file` and `--ssl-key-file` flags. + +### Enable Authentication on Atlantis Web Server +It is strongly recommended to enable authentication on the Atlantis web server. Enable BasicAuth using the `--web-basic-auth=true` flag and set up a username and a password using the `--web-username=yourUsername` and `--web-password=yourPassword` flags. + +You can also pass these as the environment variables `ATLANTIS_WEB_BASIC_AUTH=true`, `ATLANTIS_WEB_USERNAME=yourUsername` and `ATLANTIS_WEB_PASSWORD=yourPassword`. + +::: tip Tip +We encourage the use of complex passwords in order to prevent basic brute-forcing attacks. +::: \ No newline at end of file
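As a hedged sketch of the web-server authentication flags described above (the credentials are placeholders, and the other flags a real deployment needs, such as the VCS credentials and `--repo-allowlist`, are omitted), enabling basic auth might look like this:

```bash
# Enable basic auth on the Atlantis web UI; yourUsername and yourPassword are placeholders.
atlantis server \
  --web-basic-auth=true \
  --web-username=yourUsername \
  --web-password=yourPassword

# Equivalent environment-variable form:
# ATLANTIS_WEB_BASIC_AUTH=true ATLANTIS_WEB_USERNAME=yourUsername ATLANTIS_WEB_PASSWORD=yourPassword atlantis server
```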
diff --git a/runatlantis.io/docs/server-configuration.md b/runatlantis.io/docs/server-configuration.md index 34487bc534..e4bfd60041 100644 --- a/runatlantis.io/docs/server-configuration.md +++ b/runatlantis.io/docs/server-configuration.md @@ -155,7 +155,7 @@ Values are chosen in this order: * ### `--azuredevops-token` ```bash - atlantis server --azuredevops-token="username@example.com" + atlantis server --azuredevops-token="RandomStringProducedByAzureDevOps" ``` Azure DevOps token of API user. Can also be specified via the ATLANTIS_AZUREDEVOPS_TOKEN environment variable. @@ -309,7 +309,7 @@ Values are chosen in this order: # or (recommended) ATLANTIS_GH_WEBHOOK_SECRET='secret' atlantis server ``` - Secret used to validate GitHub webhooks (see [https://developer.github.com/webhooks/securing/](https://docs.github.com/en/free-pro-team@latest/developers/webhooks-and-events/securing-your-webhooks)). + Secret used to validate GitHub webhooks (see [https://developer.github.com/webhooks/securing/](https://docs.github.com/en/developers/webhooks-and-events/webhooks/securing-your-webhooks)). ::: warning SECURITY WARNING If not specified, Atlantis won't be able to validate that the incoming webhook call came from GitHub. @@ -326,7 +326,7 @@ Values are chosen in this order: ```bash atlantis server --gh-app-id="00000" ``` - GitHub app ID. If set, GitHub authentication will be performed as [an installation](https://developer.github.com/v3/apps/installations/). + GitHub app ID. If set, GitHub authentication will be performed as [an installation](https://docs.github.com/en/rest/reference/apps#installations). ::: tip A GitHub app can be created by starting Atlantis first, then pointing your browser at @@ -348,17 +348,23 @@ Values are chosen in this order: ```bash atlantis server --gh-app-key-file="path/to/app-key.pem" ``` - Path to a GitHub App PEM encoded private key file. If set, GitHub authentication will be performed as [an installation](https://developer.github.com/v3/apps/installations/). + Path to a GitHub App PEM encoded private key file. If set, GitHub authentication will be performed as [an installation](https://docs.github.com/en/rest/reference/apps#installations). - ### `--gh-app-key` ```bash - atlantis server --gh-app-key="-----BEGIN RSA PRIVATE KEY-----(...)" + atlantis server --gh-app-key="-----BEGIN RSA PRIVATE KEY-----(...)" ``` The PEM encoded private key for the GitHub App. ::: warning SECURITY WARNING The contents of the private key will be visible by anyone that can run `ps` or look at the shell history of the machine where Atlantis is running. Use `--gh-app-key-file` to mitigate that risk. ::: +- +- ### `--gh-team-allowlist` + ```bash + atlantis server --gh-team-allowlist="myteam:plan, secteam:apply" + ``` + Comma-separated list of GitHub team and permission pairs. By default, any team can plan and apply. * ### `--gitlab-hostname` ```bash @@ -560,6 +566,12 @@ Values are chosen in this order: This is useful when running multiple Atlantis servers against a single repository so you can delegate work to each Atlantis server. Also useful when used with pre_workflow_hooks to dynamically generate an `atlantis.yaml` file. +* ### `--silence-vcs-status-no-plans` + ```bash + atlantis server --silence-vcs-status-no-plans + ``` + `--silence-vcs-status-no-plans` will tell Atlantis to ignore setting VCS status if none of the modified files are part of a project defined in the `atlantis.yaml` file. + * ### `--skip-clone-no-changes` ```bash atlantis server --skip-clone-no-changes diff --git a/runatlantis.io/docs/server-side-repo-config.md b/runatlantis.io/docs/server-side-repo-config.md index 0e2b3d4d44..b9a2b61677 100644 --- a/runatlantis.io/docs/server-side-repo-config.md +++ b/runatlantis.io/docs/server-side-repo-config.md @@ -348,6 +348,7 @@ custom workflows. ```yaml repos: - id: /.*/ + branch: /.*/ apply_requirements: [] workflow: default allowed_overrides: [] @@ -373,6 +374,7 @@ If you set a workflow with the key `default`, it will override this. | Key | Type | Default | Required | Description | |-------------------------------|----------|---------|----------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | id | string | none | yes | Value can be a regular expression when specified as /<regex>/ or an exact string match. Repo IDs are of the form `{vcs hostname}/{org}/{name}`, ex. `github.com/owner/repo`. Hostname is specified without scheme or port. For Bitbucket Server, {org} is the **name** of the project, not the key. | +| branch | string | none | no | A regex matching pull requests by base branch (the branch the pull request is getting merged into).
By default, all branches are matched | | workflow | string | none | no | A custom workflow. | | apply_requirements | []string | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved` and `mergeable`. See [Apply Requirements](apply-requirements.html) for more details. | | allowed_overrides | []string | none | no | A list of restricted keys that `atlantis.yaml` files can override. The only supported keys are `apply_requirements`, `workflow` and `delete_source_branch_on_merge` | diff --git a/runatlantis.io/docs/terraform-versions.md b/runatlantis.io/docs/terraform-versions.md index 36dae57d33..7b946c792c 100644 --- a/runatlantis.io/docs/terraform-versions.md +++ b/runatlantis.io/docs/terraform-versions.md @@ -27,3 +27,7 @@ See [Terraform `required_version`](https://www.terraform.io/docs/configuration/t Atlantis will automatically download the version specified. ::: +::: tip NOTE +The Atlantis [latest docker image](https://github.com/runatlantis/atlantis/pkgs/container/atlantis/9854680?tag=latest) tends to have recent versions of Terraform, but there may be a delay as new versions are released. The highest version of Terraform allowed in your code is the version specified by `DEFAULT_TERRAFORM_VERSION` in the image your server is running. +::: + diff --git a/runatlantis.io/docs/using-atlantis.md b/runatlantis.io/docs/using-atlantis.md index 24a0a29352..4695831e81 100644 --- a/runatlantis.io/docs/using-atlantis.md +++ b/runatlantis.io/docs/using-atlantis.md @@ -86,6 +86,7 @@ atlantis apply -w staging * `-d directory` Apply the plan for this directory, relative to root of repo. Use `.` for root. * `-p project` Apply the plan for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.html). Cannot be used at same time as `-d` or `-w`. * `-w workspace` Apply the plan for this [Terraform workspace](https://www.terraform.io/docs/state/workspaces.html). If not using Terraform workspaces you can ignore this. +* `--auto-merge-disabled` Disable [automerge](automerging.html) for this apply command. * `--verbose` Append Atlantis log to comment. ### Additional Terraform flags diff --git a/runatlantis.io/guide/testing-locally.md b/runatlantis.io/guide/testing-locally.md index cbe371473b..55e04957cb 100644 --- a/runatlantis.io/guide/testing-locally.md +++ b/runatlantis.io/guide/testing-locally.md @@ -262,7 +262,7 @@ atlantis server \ --repo-allowlist="$REPO_ALLOWLIST" ``` -##### Azure DevOps +### Azure DevOps A certificate and private key are required if using Basic authentication for webhooks. diff --git a/scripts/binary-release.sh b/scripts/binary-release.sh deleted file mode 100755 index ffbf4c229d..0000000000 --- a/scripts/binary-release.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/bash - -# define architecture we want to build -XC_ARCH=${XC_ARCH:-"386 amd64 arm arm64"} -XC_OS=${XC_OS:-linux darwin} -XC_EXCLUDE_OSARCH="!darwin/arm !darwin/386 !darwin/arm64" - -# clean up -echo "-> running clean up...." -rm -rf output/* - -if ! which gox > /dev/null; then - echo "-> installing gox..." - # Need to run go get in a separate dir - # so it doesn't modify our go.mod. - SRC_DIR=$(pwd) - cd $(mktemp -d) - go mod init example.com/m - go get -u github.com/mitchellh/gox - cd "$SRC_DIR" -fi - -# build -# we want to build statically linked binaries -export CGO_ENABLED=0 -echo "-> building..." 
-gox \ - -os="${XC_OS}" \ - -arch="${XC_ARCH}" \ - -osarch="${XC_EXCLUDE_OSARCH}" \ - -output "output/{{.OS}}_{{.Arch}}/atlantis" \ - . - -# Zip and copy to the dist dir -echo "" -echo "Packaging..." -for PLATFORM in $(find ./output -mindepth 1 -maxdepth 1 -type d); do - OSARCH=$(basename ${PLATFORM}) - echo "--> ${OSARCH}" - - pushd $PLATFORM >/dev/null 2>&1 - zip ../atlantis_${OSARCH}.zip ./* - popd >/dev/null 2>&1 -done - -echo "" -echo "" -echo "-----------------------------------" -echo "Output:" -ls -alh output/ diff --git a/server/controllers/events/azuredevops_request_validator.go b/server/controllers/events/azuredevops_request_validator.go index 0b6e77e4f9..e0786d8eba 100644 --- a/server/controllers/events/azuredevops_request_validator.go +++ b/server/controllers/events/azuredevops_request_validator.go @@ -2,7 +2,7 @@ package events import ( "fmt" - "io/ioutil" + "io" "net/http" "github.com/mcdafydd/go-azuredevops/azuredevops" @@ -47,7 +47,7 @@ func (d *DefaultAzureDevopsRequestValidator) validateWithBasicAuth(r *http.Reque func (d *DefaultAzureDevopsRequestValidator) validateWithoutBasicAuth(r *http.Request) ([]byte, error) { ct := r.Header.Get("Content-Type") if ct == "application/json" || ct == "application/json; charset=utf-8" { - payload, err := ioutil.ReadAll(r.Body) + payload, err := io.ReadAll(r.Body) if err != nil { return nil, fmt.Errorf("could not read body: %s", err) } diff --git a/server/controllers/events/events_controller.go b/server/controllers/events/events_controller.go index 7164d10da7..cd4d097aeb 100644 --- a/server/controllers/events/events_controller.go +++ b/server/controllers/events/events_controller.go @@ -15,7 +15,7 @@ package events import ( "fmt" - "io/ioutil" + "io" "net/http" "strings" @@ -163,7 +163,7 @@ func (e *VCSEventsController) handleBitbucketCloudPost(w http.ResponseWriter, r eventType := r.Header.Get(bitbucketEventTypeHeader) reqID := r.Header.Get(bitbucketCloudRequestIDHeader) defer r.Body.Close() // nolint: errcheck - body, err := ioutil.ReadAll(r.Body) + body, err := io.ReadAll(r.Body) if err != nil { e.respond(w, logging.Error, http.StatusBadRequest, "Unable to read body: %s %s=%s", err, bitbucketCloudRequestIDHeader, reqID) return @@ -187,7 +187,7 @@ func (e *VCSEventsController) handleBitbucketServerPost(w http.ResponseWriter, r reqID := r.Header.Get(bitbucketServerRequestIDHeader) sig := r.Header.Get(bitbucketServerSignatureHeader) defer r.Body.Close() // nolint: errcheck - body, err := ioutil.ReadAll(r.Body) + body, err := io.ReadAll(r.Body) if err != nil { e.respond(w, logging.Error, http.StatusBadRequest, "Unable to read body: %s %s=%s", err, bitbucketServerRequestIDHeader, reqID) return @@ -205,7 +205,7 @@ func (e *VCSEventsController) handleBitbucketServerPost(w http.ResponseWriter, r } } switch eventType { - case bitbucketserver.PullCreatedHeader, bitbucketserver.PullMergedHeader, bitbucketserver.PullDeclinedHeader, bitbucketserver.PullDeletedHeader: + case bitbucketserver.PullCreatedHeader, bitbucketserver.PullFromRefUpdatedHeader, bitbucketserver.PullMergedHeader, bitbucketserver.PullDeclinedHeader, bitbucketserver.PullDeletedHeader: e.Logger.Debug("handling as pull request state changed event") e.handleBitbucketServerPullRequestEvent(w, eventType, body, reqID) return diff --git a/server/controllers/events/events_controller_e2e_test.go b/server/controllers/events/events_controller_e2e_test.go index fd0c6adfe1..3496d86642 100644 --- a/server/controllers/events/events_controller_e2e_test.go +++ 
b/server/controllers/events/events_controller_e2e_test.go @@ -3,7 +3,6 @@ package events_test import ( "bytes" "fmt" - "io/ioutil" "net/http" "net/http/httptest" "os" @@ -30,10 +29,12 @@ import ( "github.com/runatlantis/atlantis/server/events/mocks" "github.com/runatlantis/atlantis/server/events/mocks/matchers" "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/events/vcs" vcsmocks "github.com/runatlantis/atlantis/server/events/vcs/mocks" "github.com/runatlantis/atlantis/server/events/webhooks" "github.com/runatlantis/atlantis/server/events/yaml" "github.com/runatlantis/atlantis/server/events/yaml/valid" + handlermocks "github.com/runatlantis/atlantis/server/handlers/mocks" "github.com/runatlantis/atlantis/server/logging" . "github.com/runatlantis/atlantis/testing" ) @@ -527,7 +528,7 @@ func TestSimlpleWorkflow_terraformLockFile(t *testing.T) { oldLockFilePath, err := filepath.Abs(filepath.Join("testfixtures", "null_provider_lockfile_old_version")) Ok(t, err) - oldLockFileContent, err := ioutil.ReadFile(oldLockFilePath) + oldLockFileContent, err := os.ReadFile(oldLockFilePath) Ok(t, err) if c.LockFileTracked { @@ -549,7 +550,7 @@ func TestSimlpleWorkflow_terraformLockFile(t *testing.T) { ResponseContains(t, w, 200, "Processing...") // check lock file content - actualLockFileContent, err := ioutil.ReadFile(fmt.Sprintf("%s/repos/runatlantis/atlantis-tests/2/default/.terraform.lock.hcl", atlantisWorkspace.DataDir)) + actualLockFileContent, err := os.ReadFile(fmt.Sprintf("%s/repos/runatlantis/atlantis-tests/2/default/.terraform.lock.hcl", atlantisWorkspace.DataDir)) Ok(t, err) if c.LockFileTracked { if string(oldLockFileContent) != string(actualLockFileContent) { @@ -578,7 +579,7 @@ func TestSimlpleWorkflow_terraformLockFile(t *testing.T) { } // check lock file content - actualLockFileContent, err = ioutil.ReadFile(fmt.Sprintf("%s/repos/runatlantis/atlantis-tests/2/default/.terraform.lock.hcl", atlantisWorkspace.DataDir)) + actualLockFileContent, err = os.ReadFile(fmt.Sprintf("%s/repos/runatlantis/atlantis-tests/2/default/.terraform.lock.hcl", atlantisWorkspace.DataDir)) Ok(t, err) if c.LockFileTracked { if string(oldLockFileContent) != string(actualLockFileContent) { @@ -726,6 +727,7 @@ func TestGitHubWorkflowWithPolicyCheck(t *testing.T) { userConfig.EnablePolicyChecksFlag = true ctrl, vcsClient, githubGetter, atlantisWorkspace := setupE2E(t, c.RepoDir) + // Set the repo to be cloned through the testing backdoor. repoDir, headSHA, cleanup := initializeRepo(t, c.RepoDir) defer cleanup() @@ -734,7 +736,9 @@ func TestGitHubWorkflowWithPolicyCheck(t *testing.T) { // Setup test dependencies. w := httptest.NewRecorder() When(vcsClient.PullIsMergeable(AnyRepo(), matchers.AnyModelsPullRequest())).ThenReturn(true, nil) - When(vcsClient.PullIsApproved(AnyRepo(), matchers.AnyModelsPullRequest())).ThenReturn(true, nil) + When(vcsClient.PullIsApproved(AnyRepo(), matchers.AnyModelsPullRequest())).ThenReturn(models.ApprovalStatus{ + IsApproved: true, + }, nil) When(githubGetter.GetPullRequest(AnyRepo(), AnyInt())).ThenReturn(GitHubPullRequestParsed(headSHA), nil) When(vcsClient.GetModifiedFiles(AnyRepo(), matchers.AnyModelsPullRequest())).ThenReturn(c.ModifiedFiles, nil) @@ -810,9 +814,10 @@ func setupE2E(t *testing.T, repoDir string) (events_controllers.VCSEventsControl // Mocks. 
e2eVCSClient := vcsmocks.NewMockClient() - e2eStatusUpdater := &events.DefaultCommitStatusUpdater{Client: e2eVCSClient} + e2eStatusUpdater := &events.DefaultCommitStatusUpdater{Client: e2eVCSClient, TitleBuilder: vcs.StatusTitleBuilder{TitlePrefix: "atlantis"}} e2eGithubGetter := mocks.NewMockGithubPullGetter() e2eGitlabGetter := mocks.NewMockGitlabMergeRequestGetter() + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() // Real dependencies. logger := logging.NewNoopLogger(t) @@ -827,7 +832,7 @@ func setupE2E(t *testing.T, repoDir string) (events_controllers.VCSEventsControl GithubUser: "github-user", GitlabUser: "gitlab-user", } - terraformClient, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "", "default-tf-version", "https://releases.hashicorp.com", &NoopTFDownloader{}, false) + terraformClient, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "", "default-tf-version", "https://releases.hashicorp.com", &NoopTFDownloader{}, false, projectCmdOutputHandler) Ok(t, err) boltdb, err := db.New(dataDir) Ok(t, err) @@ -935,8 +940,7 @@ func setupE2E(t *testing.T, repoDir string) (events_controllers.VCSEventsControl Webhooks: &mockWebhookSender{}, WorkingDirLocker: locker, AggregateApplyRequirements: &events.AggregateApplyRequirements{ - PullApprovedChecker: e2eVCSClient, - WorkingDir: workingDir, + WorkingDir: workingDir, }, } @@ -982,6 +986,8 @@ func setupE2E(t *testing.T, repoDir string) (events_controllers.VCSEventsControl boltdb, ) + e2ePullReqStatusFetcher := vcs.NewPullReqStatusFetcher(e2eVCSClient) + applyCommandRunner := events.NewApplyCommandRunner( e2eVCSClient, false, @@ -996,6 +1002,7 @@ func setupE2E(t *testing.T, repoDir string) (events_controllers.VCSEventsControl parallelPoolSize, silenceNoProjects, false, + e2ePullReqStatusFetcher, ) approvePoliciesCommandRunner := events.NewApprovePoliciesCommandRunner( @@ -1052,10 +1059,12 @@ func setupE2E(t *testing.T, repoDir string) (events_controllers.VCSEventsControl TestingMode: true, CommandRunner: commandRunner, PullCleaner: &events.PullClosedExecutor{ - Locker: lockingClient, - VCSClient: e2eVCSClient, - WorkingDir: workingDir, - DB: boltdb, + Locker: lockingClient, + VCSClient: e2eVCSClient, + WorkingDir: workingDir, + DB: boltdb, + PullClosedTemplate: &events.PullClosedEventTemplate{}, + LogStreamResourceCleaner: projectCmdOutputHandler, }, Logger: logger, Parser: eventParser, @@ -1084,7 +1093,7 @@ func (w *mockWebhookSender) Send(log logging.SimpleLogging, result webhooks.Appl } func GitHubCommentEvent(t *testing.T, comment string) *http.Request { - requestJSON, err := ioutil.ReadFile(filepath.Join("testfixtures", "githubIssueCommentEvent.json")) + requestJSON, err := os.ReadFile(filepath.Join("testfixtures", "githubIssueCommentEvent.json")) Ok(t, err) requestJSON = []byte(strings.Replace(string(requestJSON), "###comment body###", comment, 1)) req, err := http.NewRequest("POST", "/events", bytes.NewBuffer(requestJSON)) @@ -1095,7 +1104,7 @@ func GitHubCommentEvent(t *testing.T, comment string) *http.Request { } func GitHubPullRequestOpenedEvent(t *testing.T, headSHA string) *http.Request { - requestJSON, err := ioutil.ReadFile(filepath.Join("testfixtures", "githubPullRequestOpenedEvent.json")) + requestJSON, err := os.ReadFile(filepath.Join("testfixtures", "githubPullRequestOpenedEvent.json")) Ok(t, err) // Replace sha with expected sha. 
requestJSONStr := strings.Replace(string(requestJSON), "c31fd9ea6f557ad2ea659944c3844a059b83bc5d", headSHA, -1) @@ -1107,7 +1116,7 @@ func GitHubPullRequestOpenedEvent(t *testing.T, headSHA string) *http.Request { } func GitHubPullRequestClosedEvent(t *testing.T) *http.Request { - requestJSON, err := ioutil.ReadFile(filepath.Join("testfixtures", "githubPullRequestClosedEvent.json")) + requestJSON, err := os.ReadFile(filepath.Join("testfixtures", "githubPullRequestClosedEvent.json")) Ok(t, err) req, err := http.NewRequest("POST", "/events", bytes.NewBuffer(requestJSON)) Ok(t, err) @@ -1217,7 +1226,7 @@ func assertCommentEquals(t *testing.T, expReplies []string, act string, repoDir } for _, expFile := range expReplies { - exp, err := ioutil.ReadFile(filepath.Join(absRepoPath(t, repoDir), expFile)) + exp, err := os.ReadFile(filepath.Join(absRepoPath(t, repoDir), expFile)) Ok(t, err) expStr := string(exp) // My editor adds a newline to all the files, so if the actual comment @@ -1235,7 +1244,7 @@ func assertCommentEquals(t *testing.T, expReplies []string, act string, repoDir t.FailNow() } else { actFile := filepath.Join(absRepoPath(t, repoDir), expFile+".act") - err := ioutil.WriteFile(actFile, []byte(act), 0600) + err := os.WriteFile(actFile, []byte(act), 0600) Ok(t, err) cwd, err := os.Getwd() Ok(t, err) diff --git a/server/controllers/events/events_controller_test.go b/server/controllers/events/events_controller_test.go index 2ff5b1e4a5..dff2ffbb8b 100644 --- a/server/controllers/events/events_controller_test.go +++ b/server/controllers/events/events_controller_test.go @@ -17,9 +17,10 @@ import ( "bytes" "errors" "fmt" - "io/ioutil" + "io" "net/http" "net/http/httptest" + "os" "path/filepath" "reflect" "strings" @@ -201,7 +202,7 @@ func TestPost_GitlabCommentNotAllowlisted(t *testing.T) { RepoAllowlistChecker: &events.RepoAllowlistChecker{}, VCSClient: vcsClient, } - requestJSON, err := ioutil.ReadFile(filepath.Join("testfixtures", "gitlabMergeCommentEvent_notAllowlisted.json")) + requestJSON, err := os.ReadFile(filepath.Join("testfixtures", "gitlabMergeCommentEvent_notAllowlisted.json")) Ok(t, err) req, _ := http.NewRequest("GET", "", bytes.NewBuffer(requestJSON)) req.Header.Set(gitlabHeader, "Note Hook") @@ -209,7 +210,7 @@ func TestPost_GitlabCommentNotAllowlisted(t *testing.T) { e.Post(w, req) Equals(t, http.StatusForbidden, w.Result().StatusCode) - body, _ := ioutil.ReadAll(w.Result().Body) + body, _ := io.ReadAll(w.Result().Body) exp := "Repo not allowlisted" Assert(t, strings.Contains(string(body), exp), "exp %q to be contained in %q", exp, string(body)) expRepo, _ := models.NewRepo(models.Gitlab, "gitlabhq/gitlab-test", "https://example.com/gitlabhq/gitlab-test.git", "", "") @@ -230,7 +231,7 @@ func TestPost_GitlabCommentNotAllowlistedWithSilenceErrors(t *testing.T) { VCSClient: vcsClient, SilenceAllowlistErrors: true, } - requestJSON, err := ioutil.ReadFile(filepath.Join("testfixtures", "gitlabMergeCommentEvent_notAllowlisted.json")) + requestJSON, err := os.ReadFile(filepath.Join("testfixtures", "gitlabMergeCommentEvent_notAllowlisted.json")) Ok(t, err) req, _ := http.NewRequest("GET", "", bytes.NewBuffer(requestJSON)) req.Header.Set(gitlabHeader, "Note Hook") @@ -238,7 +239,7 @@ func TestPost_GitlabCommentNotAllowlistedWithSilenceErrors(t *testing.T) { e.Post(w, req) Equals(t, http.StatusForbidden, w.Result().StatusCode) - body, _ := ioutil.ReadAll(w.Result().Body) + body, _ := io.ReadAll(w.Result().Body) exp := "Repo not allowlisted" Assert(t, strings.Contains(string(body), 
exp), "exp %q to be contained in %q", exp, string(body)) vcsClient.VerifyWasCalled(Never()).CreateComment(matchers.AnyModelsRepo(), AnyInt(), AnyString(), AnyString()) @@ -258,7 +259,7 @@ func TestPost_GithubCommentNotAllowlisted(t *testing.T) { RepoAllowlistChecker: &events.RepoAllowlistChecker{}, VCSClient: vcsClient, } - requestJSON, err := ioutil.ReadFile(filepath.Join("testfixtures", "githubIssueCommentEvent_notAllowlisted.json")) + requestJSON, err := os.ReadFile(filepath.Join("testfixtures", "githubIssueCommentEvent_notAllowlisted.json")) Ok(t, err) req, _ := http.NewRequest("GET", "", bytes.NewBuffer(requestJSON)) req.Header.Set("Content-Type", "application/json") @@ -267,7 +268,7 @@ func TestPost_GithubCommentNotAllowlisted(t *testing.T) { e.Post(w, req) Equals(t, http.StatusForbidden, w.Result().StatusCode) - body, _ := ioutil.ReadAll(w.Result().Body) + body, _ := io.ReadAll(w.Result().Body) exp := "Repo not allowlisted" Assert(t, strings.Contains(string(body), exp), "exp %q to be contained in %q", exp, string(body)) expRepo, _ := models.NewRepo(models.Github, "baxterthehacker/public-repo", "https://github.com/baxterthehacker/public-repo.git", "", "") @@ -288,7 +289,7 @@ func TestPost_GithubCommentNotAllowlistedWithSilenceErrors(t *testing.T) { VCSClient: vcsClient, SilenceAllowlistErrors: true, } - requestJSON, err := ioutil.ReadFile(filepath.Join("testfixtures", "githubIssueCommentEvent_notAllowlisted.json")) + requestJSON, err := os.ReadFile(filepath.Join("testfixtures", "githubIssueCommentEvent_notAllowlisted.json")) Ok(t, err) req, _ := http.NewRequest("GET", "", bytes.NewBuffer(requestJSON)) req.Header.Set("Content-Type", "application/json") @@ -297,7 +298,7 @@ func TestPost_GithubCommentNotAllowlistedWithSilenceErrors(t *testing.T) { e.Post(w, req) Equals(t, http.StatusForbidden, w.Result().StatusCode) - body, _ := ioutil.ReadAll(w.Result().Body) + body, _ := io.ReadAll(w.Result().Body) exp := "Repo not allowlisted" Assert(t, strings.Contains(string(body), exp), "exp %q to be contained in %q", exp, string(body)) vcsClient.VerifyWasCalled(Never()).CreateComment(matchers.AnyModelsRepo(), AnyInt(), AnyString(), AnyString()) @@ -645,7 +646,7 @@ func TestPost_BBServerPullClosed(t *testing.T) { } // Build HTTP request. - requestBytes, err := ioutil.ReadFile(filepath.Join("testfixtures", "bb-server-pull-deleted-event.json")) + requestBytes, err := os.ReadFile(filepath.Join("testfixtures", "bb-server-pull-deleted-event.json")) // Replace the eventKey field with our event type. 
requestJSON := strings.Replace(string(requestBytes), `"eventKey":"pr:deleted",`, fmt.Sprintf(`"eventKey":"%s",`, c.header), -1) Ok(t, err) diff --git a/server/controllers/events/github_request_validator.go b/server/controllers/events/github_request_validator.go index 0d5845c1e2..9a7ffc19ba 100644 --- a/server/controllers/events/github_request_validator.go +++ b/server/controllers/events/github_request_validator.go @@ -16,7 +16,7 @@ package events import ( "errors" "fmt" - "io/ioutil" + "io" "net/http" "github.com/google/go-github/v31/github" @@ -60,7 +60,7 @@ func (d *DefaultGithubRequestValidator) validateAgainstSecret(r *http.Request, s func (d *DefaultGithubRequestValidator) validateWithoutSecret(r *http.Request) ([]byte, error) { switch ct := r.Header.Get("Content-Type"); ct { case "application/json": - payload, err := ioutil.ReadAll(r.Body) + payload, err := io.ReadAll(r.Body) if err != nil { return nil, fmt.Errorf("could not read body: %s", err) } diff --git a/server/controllers/events/gitlab_request_parser_validator.go b/server/controllers/events/gitlab_request_parser_validator.go index f1bffccebb..9755bd5d9e 100644 --- a/server/controllers/events/gitlab_request_parser_validator.go +++ b/server/controllers/events/gitlab_request_parser_validator.go @@ -16,7 +16,7 @@ package events import ( "encoding/json" "fmt" - "io/ioutil" + "io" "net/http" gitlab "github.com/xanzy/go-gitlab" @@ -68,7 +68,7 @@ func (d *DefaultGitlabRequestParserValidator) ParseAndValidate(r *http.Request, // Parse request into a gitlab object based on the object type specified // in the gitlabHeader. - bytes, err := ioutil.ReadAll(r.Body) + bytes, err := io.ReadAll(r.Body) if err != nil { return nil, err } diff --git a/server/controllers/events/testfixtures/test-repos/automerge/exp-output-apply-dir1.txt b/server/controllers/events/testfixtures/test-repos/automerge/exp-output-apply-dir1.txt index 19bf762f1b..8c358550ec 100644 --- a/server/controllers/events/testfixtures/test-repos/automerge/exp-output-apply-dir1.txt +++ b/server/controllers/events/testfixtures/test-repos/automerge/exp-output-apply-dir1.txt @@ -6,5 +6,6 @@ null_resource.automerge[0]: Creation complete after *s [id=*******************] Apply complete! Resources: 1 added, 0 changed, 0 destroyed. + ``` diff --git a/server/controllers/events/testfixtures/test-repos/automerge/exp-output-apply-dir1.txt.act b/server/controllers/events/testfixtures/test-repos/automerge/exp-output-apply-dir1.txt.act deleted file mode 100644 index 19bf762f1b..0000000000 --- a/server/controllers/events/testfixtures/test-repos/automerge/exp-output-apply-dir1.txt.act +++ /dev/null @@ -1,10 +0,0 @@ -Ran Apply for dir: `dir1` workspace: `default` - -```diff -null_resource.automerge[0]: Creating... -null_resource.automerge[0]: Creation complete after *s [id=*******************] - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -``` - diff --git a/server/controllers/events/testfixtures/test-repos/automerge/exp-output-apply-dir2.txt b/server/controllers/events/testfixtures/test-repos/automerge/exp-output-apply-dir2.txt index f486966159..c4f1a9ec09 100644 --- a/server/controllers/events/testfixtures/test-repos/automerge/exp-output-apply-dir2.txt +++ b/server/controllers/events/testfixtures/test-repos/automerge/exp-output-apply-dir2.txt @@ -6,5 +6,6 @@ null_resource.automerge[0]: Creation complete after *s [id=*******************] Apply complete! Resources: 1 added, 0 changed, 0 destroyed. 
+ ``` diff --git a/server/controllers/events/testfixtures/test-repos/automerge/exp-output-apply-dir2.txt.act b/server/controllers/events/testfixtures/test-repos/automerge/exp-output-apply-dir2.txt.act deleted file mode 100644 index f486966159..0000000000 --- a/server/controllers/events/testfixtures/test-repos/automerge/exp-output-apply-dir2.txt.act +++ /dev/null @@ -1,10 +0,0 @@ -Ran Apply for dir: `dir2` workspace: `default` - -```diff -null_resource.automerge[0]: Creating... -null_resource.automerge[0]: Creation complete after *s [id=*******************] - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -``` - diff --git a/server/controllers/events/testfixtures/test-repos/automerge/exp-output-autoplan.txt b/server/controllers/events/testfixtures/test-repos/automerge/exp-output-autoplan.txt index 8aa62370f3..5a232aa80f 100644 --- a/server/controllers/events/testfixtures/test-repos/automerge/exp-output-autoplan.txt +++ b/server/controllers/events/testfixtures/test-repos/automerge/exp-output-autoplan.txt @@ -21,6 +21,7 @@ Terraform will perform the following actions: Plan: 1 to add, 0 to change, 0 to destroy. + ``` * :arrow_forward: To **apply** this plan, comment: @@ -50,6 +51,7 @@ Terraform will perform the following actions: Plan: 1 to add, 0 to change, 0 to destroy. + ``` * :arrow_forward: To **apply** this plan, comment: diff --git a/server/controllers/events/testfixtures/test-repos/automerge/exp-output-autoplan.txt.act b/server/controllers/events/testfixtures/test-repos/automerge/exp-output-autoplan.txt.act deleted file mode 100644 index 8aa62370f3..0000000000 --- a/server/controllers/events/testfixtures/test-repos/automerge/exp-output-autoplan.txt.act +++ /dev/null @@ -1,67 +0,0 @@ -Ran Plan for 2 projects: - -1. dir: `dir1` workspace: `default` -1. dir: `dir2` workspace: `default` - -### 1. dir: `dir1` workspace: `default` -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.automerge[0] will be created -+ resource "null_resource" "automerge" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir1` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir1` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -### 2. dir: `dir2` workspace: `default` -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.automerge[0] will be created -+ resource "null_resource" "automerge" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir2` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir2` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/modules-yaml/exp-output-apply-production.txt.act b/server/controllers/events/testfixtures/test-repos/modules-yaml/exp-output-apply-production.txt.act deleted file mode 100644 index 4885579d1e..0000000000 --- a/server/controllers/events/testfixtures/test-repos/modules-yaml/exp-output-apply-production.txt.act +++ /dev/null @@ -1,14 +0,0 @@ -Ran Apply for dir: `production` workspace: `default` - -```diff -module.null.null_resource.this: Creating... -module.null.null_resource.this: Creation complete after *s [id=*******************] - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -var = "production" - -``` - diff --git a/server/controllers/events/testfixtures/test-repos/modules-yaml/exp-output-apply-staging.txt.act b/server/controllers/events/testfixtures/test-repos/modules-yaml/exp-output-apply-staging.txt.act deleted file mode 100644 index 44d7f37145..0000000000 --- a/server/controllers/events/testfixtures/test-repos/modules-yaml/exp-output-apply-staging.txt.act +++ /dev/null @@ -1,14 +0,0 @@ -Ran Apply for dir: `staging` workspace: `default` - -```diff -module.null.null_resource.this: Creating... -module.null.null_resource.this: Creation complete after *s [id=*******************] - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -var = "staging" - -``` - diff --git a/server/controllers/events/testfixtures/test-repos/modules-yaml/exp-output-autoplan.txt.act b/server/controllers/events/testfixtures/test-repos/modules-yaml/exp-output-autoplan.txt.act deleted file mode 100644 index 6b1c2e2433..0000000000 --- a/server/controllers/events/testfixtures/test-repos/modules-yaml/exp-output-autoplan.txt.act +++ /dev/null @@ -1,73 +0,0 @@ -Ran Plan for 2 projects: - -1. dir: `staging` workspace: `default` -1. dir: `production` workspace: `default` - -### 1. dir: `staging` workspace: `default` -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # module.null.null_resource.this will be created -+ resource "null_resource" "this" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ var = "staging" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -### 2. dir: `production` workspace: `default` -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # module.null.null_resource.this will be created -+ resource "null_resource" "this" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ var = "production" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d production` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d production` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/modules/exp-output-apply-production.txt.act b/server/controllers/events/testfixtures/test-repos/modules/exp-output-apply-production.txt.act deleted file mode 100644 index 4885579d1e..0000000000 --- a/server/controllers/events/testfixtures/test-repos/modules/exp-output-apply-production.txt.act +++ /dev/null @@ -1,14 +0,0 @@ -Ran Apply for dir: `production` workspace: `default` - -```diff -module.null.null_resource.this: Creating... -module.null.null_resource.this: Creation complete after *s [id=*******************] - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -var = "production" - -``` - diff --git a/server/controllers/events/testfixtures/test-repos/modules/exp-output-apply-staging.txt.act b/server/controllers/events/testfixtures/test-repos/modules/exp-output-apply-staging.txt.act deleted file mode 100644 index 44d7f37145..0000000000 --- a/server/controllers/events/testfixtures/test-repos/modules/exp-output-apply-staging.txt.act +++ /dev/null @@ -1,14 +0,0 @@ -Ran Apply for dir: `staging` workspace: `default` - -```diff -module.null.null_resource.this: Creating... -module.null.null_resource.this: Creation complete after *s [id=*******************] - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -var = "staging" - -``` - diff --git a/server/controllers/events/testfixtures/test-repos/modules/exp-output-autoplan-only-staging.txt.act b/server/controllers/events/testfixtures/test-repos/modules/exp-output-autoplan-only-staging.txt.act deleted file mode 100644 index 50f8aca13c..0000000000 --- a/server/controllers/events/testfixtures/test-repos/modules/exp-output-autoplan-only-staging.txt.act +++ /dev/null @@ -1,37 +0,0 @@ -Ran Plan for dir: `staging` workspace: `default` - -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # module.null.null_resource.this will be created -+ resource "null_resource" "this" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ var = "staging" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/modules/exp-output-plan-production.txt.act b/server/controllers/events/testfixtures/test-repos/modules/exp-output-plan-production.txt.act deleted file mode 100644 index e238d50a49..0000000000 --- a/server/controllers/events/testfixtures/test-repos/modules/exp-output-plan-production.txt.act +++ /dev/null @@ -1,37 +0,0 @@ -Ran Plan for dir: `production` workspace: `default` - -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # module.null.null_resource.this will be created -+ resource "null_resource" "this" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ var = "production" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d production` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d production` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/modules/exp-output-plan-staging.txt.act b/server/controllers/events/testfixtures/test-repos/modules/exp-output-plan-staging.txt.act deleted file mode 100644 index 50f8aca13c..0000000000 --- a/server/controllers/events/testfixtures/test-repos/modules/exp-output-plan-staging.txt.act +++ /dev/null @@ -1,37 +0,0 @@ -Ran Plan for dir: `staging` workspace: `default` - -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # module.null.null_resource.this will be created -+ resource "null_resource" "this" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ var = "staging" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt b/server/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt index 3e9ebf0534..60495fce53 100644 --- a/server/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt @@ -10,6 +10,8 @@ FAIL - - main - WARNING: Null Resource creation is prohibit 1 test, 0 passed, 0 warnings, 1 failure, 0 exceptions ``` -* :heavy_check_mark: To **approve** failing policies either request an approval from approvers or address the failure by modifying the codebase. +* :heavy_check_mark: To **approve** failing policies an authorized approver can comment: + * `atlantis approve_policies` +* :repeat: Or, address the policy failure by modifying the codebase and re-planning. diff --git a/server/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt b/server/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt index 3e9ebf0534..60495fce53 100644 --- a/server/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt @@ -10,6 +10,8 @@ FAIL - - main - WARNING: Null Resource creation is prohibit 1 test, 0 passed, 0 warnings, 1 failure, 0 exceptions ``` -* :heavy_check_mark: To **approve** failing policies either request an approval from approvers or address the failure by modifying the codebase. +* :heavy_check_mark: To **approve** failing policies an authorized approver can comment: + * `atlantis approve_policies` +* :repeat: Or, address the policy failure by modifying the codebase and re-planning. diff --git a/server/controllers/events/testfixtures/test-repos/policy-checks-extra-args/exp-output-auto-policy-check.txt b/server/controllers/events/testfixtures/test-repos/policy-checks-extra-args/exp-output-auto-policy-check.txt index 20ba22012c..3e51cc0bd6 100644 --- a/server/controllers/events/testfixtures/test-repos/policy-checks-extra-args/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testfixtures/test-repos/policy-checks-extra-args/exp-output-auto-policy-check.txt @@ -10,6 +10,8 @@ FAIL - - null_resource_policy - WARNING: Null Resource crea 1 test, 0 passed, 0 warnings, 1 failure, 0 exceptions ``` -* :heavy_check_mark: To **approve** failing policies either request an approval from approvers or address the failure by modifying the codebase. +* :heavy_check_mark: To **approve** failing policies an authorized approver can comment: + * `atlantis approve_policies` +* :repeat: Or, address the policy failure by modifying the codebase and re-planning. 
diff --git a/server/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt b/server/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt index 994b49925e..2df6974dbc 100644 --- a/server/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt @@ -30,7 +30,9 @@ FAIL - - main - WARNING: Forbidden Resource creation is pro 1 test, 0 passed, 0 warnings, 1 failure, 0 exceptions ``` -* :heavy_check_mark: To **approve** failing policies either request an approval from approvers or address the failure by modifying the codebase. +* :heavy_check_mark: To **approve** failing policies an authorized approver can comment: + * `atlantis approve_policies` +* :repeat: Or, address the policy failure by modifying the codebase and re-planning. --- diff --git a/server/controllers/events/testfixtures/test-repos/policy-checks/exp-output-auto-policy-check.txt b/server/controllers/events/testfixtures/test-repos/policy-checks/exp-output-auto-policy-check.txt index 3e9ebf0534..60495fce53 100644 --- a/server/controllers/events/testfixtures/test-repos/policy-checks/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testfixtures/test-repos/policy-checks/exp-output-auto-policy-check.txt @@ -10,6 +10,8 @@ FAIL - - main - WARNING: Null Resource creation is prohibit 1 test, 0 passed, 0 warnings, 1 failure, 0 exceptions ``` -* :heavy_check_mark: To **approve** failing policies either request an approval from approvers or address the failure by modifying the codebase. +* :heavy_check_mark: To **approve** failing policies an authorized approver can comment: + * `atlantis approve_policies` +* :repeat: Or, address the policy failure by modifying the codebase and re-planning. diff --git a/server/controllers/events/testfixtures/test-repos/server-side-cfg/exp-output-apply-default-workspace.txt.act b/server/controllers/events/testfixtures/test-repos/server-side-cfg/exp-output-apply-default-workspace.txt.act deleted file mode 100644 index 336a849553..0000000000 --- a/server/controllers/events/testfixtures/test-repos/server-side-cfg/exp-output-apply-default-workspace.txt.act +++ /dev/null @@ -1,14 +0,0 @@ -Ran Apply for dir: `.` workspace: `default` - -```diff -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -workspace = "default" - -``` - diff --git a/server/controllers/events/testfixtures/test-repos/server-side-cfg/exp-output-apply-staging-workspace.txt.act b/server/controllers/events/testfixtures/test-repos/server-side-cfg/exp-output-apply-staging-workspace.txt.act deleted file mode 100644 index b36f8209c8..0000000000 --- a/server/controllers/events/testfixtures/test-repos/server-side-cfg/exp-output-apply-staging-workspace.txt.act +++ /dev/null @@ -1,14 +0,0 @@ -Ran Apply for dir: `.` workspace: `staging` - -```diff -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. 
- -Outputs: - -workspace = "staging" - -``` - diff --git a/server/controllers/events/testfixtures/test-repos/server-side-cfg/exp-output-autoplan.txt.act b/server/controllers/events/testfixtures/test-repos/server-side-cfg/exp-output-autoplan.txt.act deleted file mode 100644 index 25eddec36e..0000000000 --- a/server/controllers/events/testfixtures/test-repos/server-side-cfg/exp-output-autoplan.txt.act +++ /dev/null @@ -1,79 +0,0 @@ -Ran Plan for 2 projects: - -1. dir: `.` workspace: `default` -1. dir: `.` workspace: `staging` - -### 1. dir: `.` workspace: `default` -
Show Output - -```diff -preinit custom - - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ workspace = "default" - -postplan custom - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -### 2. dir: `.` workspace: `staging` -
Show Output - -```diff -preinit staging - - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ workspace = "staging" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -w staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -w staging` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/simple-yaml/exp-output-apply-all.txt.act b/server/controllers/events/testfixtures/test-repos/simple-yaml/exp-output-apply-all.txt.act deleted file mode 100644 index 04b926ff5e..0000000000 --- a/server/controllers/events/testfixtures/test-repos/simple-yaml/exp-output-apply-all.txt.act +++ /dev/null @@ -1,43 +0,0 @@ -Ran Apply for 2 projects: - -1. dir: `.` workspace: `default` -1. dir: `.` workspace: `staging` - -### 1. dir: `.` workspace: `default` -```diff -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -var = "fromconfig" -workspace = "default" - -``` - ---- -### 2. dir: `.` workspace: `staging` -
Show Output - -```diff -preapply - -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -var = "fromfile" -workspace = "staging" - -postapply - -``` -
- ---- - diff --git a/server/controllers/events/testfixtures/test-repos/simple-yaml/exp-output-apply-default.txt.act b/server/controllers/events/testfixtures/test-repos/simple-yaml/exp-output-apply-default.txt.act deleted file mode 100644 index 5e3d22778b..0000000000 --- a/server/controllers/events/testfixtures/test-repos/simple-yaml/exp-output-apply-default.txt.act +++ /dev/null @@ -1,15 +0,0 @@ -Ran Apply for dir: `.` workspace: `default` - -```diff -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -var = "fromconfig" -workspace = "default" - -``` - diff --git a/server/controllers/events/testfixtures/test-repos/simple-yaml/exp-output-apply-staging.txt.act b/server/controllers/events/testfixtures/test-repos/simple-yaml/exp-output-apply-staging.txt.act deleted file mode 100644 index 88f2698f0b..0000000000 --- a/server/controllers/events/testfixtures/test-repos/simple-yaml/exp-output-apply-staging.txt.act +++ /dev/null @@ -1,22 +0,0 @@ -Ran Apply for dir: `.` workspace: `staging` - -
Show Output - -```diff -preapply - -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -var = "fromfile" -workspace = "staging" - -postapply - -``` -
- diff --git a/server/controllers/events/testfixtures/test-repos/simple-yaml/exp-output-autoplan.txt.act b/server/controllers/events/testfixtures/test-repos/simple-yaml/exp-output-autoplan.txt.act deleted file mode 100644 index 5145516ef5..0000000000 --- a/server/controllers/events/testfixtures/test-repos/simple-yaml/exp-output-autoplan.txt.act +++ /dev/null @@ -1,79 +0,0 @@ -Ran Plan for 2 projects: - -1. dir: `.` workspace: `default` -1. dir: `.` workspace: `staging` - -### 1. dir: `.` workspace: `default` -
Show Output - -```diff -preinit - - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ var = "fromconfig" -+ workspace = "default" - -postplan - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -### 2. dir: `.` workspace: `staging` -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ var = "fromfile" -+ workspace = "staging" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -w staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -w staging` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/simple/exp-output-apply-var-all.txt.act b/server/controllers/events/testfixtures/test-repos/simple/exp-output-apply-var-all.txt.act deleted file mode 100644 index 11f65032da..0000000000 --- a/server/controllers/events/testfixtures/test-repos/simple/exp-output-apply-var-all.txt.act +++ /dev/null @@ -1,50 +0,0 @@ -Ran Apply for 2 projects: - -1. dir: `.` workspace: `default` -1. dir: `.` workspace: `new_workspace` - -### 1. dir: `.` workspace: `default` -
Show Output - -```diff -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 3 added, 0 changed, 0 destroyed. - -Outputs: - -var = "default_workspace" -workspace = "default" - -``` -
- ---- -### 2. dir: `.` workspace: `new_workspace` -
Show Output - -```diff -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 3 added, 0 changed, 0 destroyed. - -Outputs: - -var = "new_workspace" -workspace = "new_workspace" - -``` -
- ---- - diff --git a/server/controllers/events/testfixtures/test-repos/simple/exp-output-apply-var-default-workspace.txt.act b/server/controllers/events/testfixtures/test-repos/simple/exp-output-apply-var-default-workspace.txt.act deleted file mode 100644 index cfa21dde33..0000000000 --- a/server/controllers/events/testfixtures/test-repos/simple/exp-output-apply-var-default-workspace.txt.act +++ /dev/null @@ -1,22 +0,0 @@ -Ran Apply for dir: `.` workspace: `default` - -
Show Output - -```diff -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 3 added, 0 changed, 0 destroyed. - -Outputs: - -var = "default_workspace" -workspace = "default" - -``` -
- diff --git a/server/controllers/events/testfixtures/test-repos/simple/exp-output-apply-var-new-workspace.txt.act b/server/controllers/events/testfixtures/test-repos/simple/exp-output-apply-var-new-workspace.txt.act deleted file mode 100644 index 8c1a0bac5e..0000000000 --- a/server/controllers/events/testfixtures/test-repos/simple/exp-output-apply-var-new-workspace.txt.act +++ /dev/null @@ -1,22 +0,0 @@ -Ran Apply for dir: `.` workspace: `new_workspace` - -
Show Output - -```diff -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 3 added, 0 changed, 0 destroyed. - -Outputs: - -var = "new_workspace" -workspace = "new_workspace" - -``` -
- diff --git a/server/controllers/events/testfixtures/test-repos/simple/exp-output-apply-var.txt.act b/server/controllers/events/testfixtures/test-repos/simple/exp-output-apply-var.txt.act deleted file mode 100644 index 59aff5f18c..0000000000 --- a/server/controllers/events/testfixtures/test-repos/simple/exp-output-apply-var.txt.act +++ /dev/null @@ -1,22 +0,0 @@ -Ran Apply for dir: `.` workspace: `default` - -
Show Output - -```diff -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 3 added, 0 changed, 0 destroyed. - -Outputs: - -var = "overridden" -workspace = "default" - -``` -
- diff --git a/server/controllers/events/testfixtures/test-repos/simple/exp-output-apply.txt.act b/server/controllers/events/testfixtures/test-repos/simple/exp-output-apply.txt.act deleted file mode 100644 index 98ffd366e1..0000000000 --- a/server/controllers/events/testfixtures/test-repos/simple/exp-output-apply.txt.act +++ /dev/null @@ -1,22 +0,0 @@ -Ran Apply for dir: `.` workspace: `default` - -
Show Output - -```diff -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 3 added, 0 changed, 0 destroyed. - -Outputs: - -var = "default" -workspace = "default" - -``` -
- diff --git a/server/controllers/events/testfixtures/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt.act b/server/controllers/events/testfixtures/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt.act deleted file mode 100644 index b725eb1bf6..0000000000 --- a/server/controllers/events/testfixtures/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt.act +++ /dev/null @@ -1,48 +0,0 @@ -Ran Plan for dir: `.` workspace: `new_workspace` - -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - - # null_resource.simple2 will be created -+ resource "null_resource" "simple2" { - + id = (known after apply) - } - - # null_resource.simple3 will be created -+ resource "null_resource" "simple3" { - + id = (known after apply) - } - -Plan: 3 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ var = "new_workspace" -+ workspace = "new_workspace" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -w new_workspace` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -w new_workspace -- -var var=new_workspace` -
-Plan: 3 to add, 0 to change, 0 to destroy. - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/simple/exp-output-atlantis-plan-var-overridden.txt.act b/server/controllers/events/testfixtures/test-repos/simple/exp-output-atlantis-plan-var-overridden.txt.act deleted file mode 100644 index bc608ceb14..0000000000 --- a/server/controllers/events/testfixtures/test-repos/simple/exp-output-atlantis-plan-var-overridden.txt.act +++ /dev/null @@ -1,48 +0,0 @@ -Ran Plan for dir: `.` workspace: `default` - -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - - # null_resource.simple2 will be created -+ resource "null_resource" "simple2" { - + id = (known after apply) - } - - # null_resource.simple3 will be created -+ resource "null_resource" "simple3" { - + id = (known after apply) - } - -Plan: 3 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ var = "overridden" -+ workspace = "default" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d . -- -var var=overridden` -
-Plan: 3 to add, 0 to change, 0 to destroy. - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/simple/exp-output-atlantis-plan.txt.act b/server/controllers/events/testfixtures/test-repos/simple/exp-output-atlantis-plan.txt.act deleted file mode 100644 index c56cd47e14..0000000000 --- a/server/controllers/events/testfixtures/test-repos/simple/exp-output-atlantis-plan.txt.act +++ /dev/null @@ -1,48 +0,0 @@ -Ran Plan for dir: `.` workspace: `default` - -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - - # null_resource.simple2 will be created -+ resource "null_resource" "simple2" { - + id = (known after apply) - } - - # null_resource.simple3 will be created -+ resource "null_resource" "simple3" { - + id = (known after apply) - } - -Plan: 3 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ var = "default_workspace" -+ workspace = "default" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d . -- -var var=default_workspace` -
-Plan: 3 to add, 0 to change, 0 to destroy. - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/simple/exp-output-autoplan.txt.act b/server/controllers/events/testfixtures/test-repos/simple/exp-output-autoplan.txt.act deleted file mode 100644 index b301024b0c..0000000000 --- a/server/controllers/events/testfixtures/test-repos/simple/exp-output-autoplan.txt.act +++ /dev/null @@ -1,48 +0,0 @@ -Ran Plan for dir: `.` workspace: `default` - -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - - # null_resource.simple2 will be created -+ resource "null_resource" "simple2" { - + id = (known after apply) - } - - # null_resource.simple3 will be created -+ resource "null_resource" "simple3" { - + id = (known after apply) - } - -Plan: 3 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ var = "default" -+ workspace = "default" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -
-Plan: 3 to add, 0 to change, 0 to destroy. - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-apply-default.txt.act b/server/controllers/events/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-apply-default.txt.act deleted file mode 100644 index ccc0bfe017..0000000000 --- a/server/controllers/events/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-apply-default.txt.act +++ /dev/null @@ -1,15 +0,0 @@ -Ran Apply for project: `default` dir: `.` workspace: `default` - -```diff -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -var = "default" -workspace = "default" - -``` - diff --git a/server/controllers/events/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-apply-staging.txt.act b/server/controllers/events/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-apply-staging.txt.act deleted file mode 100644 index 6d217cc7fd..0000000000 --- a/server/controllers/events/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-apply-staging.txt.act +++ /dev/null @@ -1,15 +0,0 @@ -Ran Apply for project: `staging` dir: `.` workspace: `default` - -```diff -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -var = "staging" -workspace = "default" - -``` - diff --git a/server/controllers/events/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt.act b/server/controllers/events/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt.act deleted file mode 100644 index c97767f650..0000000000 --- a/server/controllers/events/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt.act +++ /dev/null @@ -1,38 +0,0 @@ -Ran Plan for project: `default` dir: `.` workspace: `default` - -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ var = "default" -+ workspace = "default" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p default` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -p default` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt.act b/server/controllers/events/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt.act deleted file mode 100644 index 1c367d94bc..0000000000 --- a/server/controllers/events/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt.act +++ /dev/null @@ -1,38 +0,0 @@ -Ran Plan for project: `staging` dir: `.` workspace: `default` - -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ var = "staging" -+ workspace = "default" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -p staging` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/tfvars-yaml/exp-output-apply-default.txt.act b/server/controllers/events/testfixtures/test-repos/tfvars-yaml/exp-output-apply-default.txt.act deleted file mode 100644 index ccc0bfe017..0000000000 --- a/server/controllers/events/testfixtures/test-repos/tfvars-yaml/exp-output-apply-default.txt.act +++ /dev/null @@ -1,15 +0,0 @@ -Ran Apply for project: `default` dir: `.` workspace: `default` - -```diff -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -var = "default" -workspace = "default" - -``` - diff --git a/server/controllers/events/testfixtures/test-repos/tfvars-yaml/exp-output-apply-staging.txt.act b/server/controllers/events/testfixtures/test-repos/tfvars-yaml/exp-output-apply-staging.txt.act deleted file mode 100644 index 6d217cc7fd..0000000000 --- a/server/controllers/events/testfixtures/test-repos/tfvars-yaml/exp-output-apply-staging.txt.act +++ /dev/null @@ -1,15 +0,0 @@ -Ran Apply for project: `staging` dir: `.` workspace: `default` - -```diff -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -var = "staging" -workspace = "default" - -``` - diff --git a/server/controllers/events/testfixtures/test-repos/tfvars-yaml/exp-output-autoplan.txt.act b/server/controllers/events/testfixtures/test-repos/tfvars-yaml/exp-output-autoplan.txt.act deleted file mode 100644 index 73619713ad..0000000000 --- a/server/controllers/events/testfixtures/test-repos/tfvars-yaml/exp-output-autoplan.txt.act +++ /dev/null @@ -1,77 +0,0 @@ -Ran Plan for 2 projects: - -1. project: `default` dir: `.` workspace: `default` -1. project: `staging` dir: `.` workspace: `default` - -### 1. project: `default` dir: `.` workspace: `default` -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ var = "default" -+ workspace = "default" - -workspace=default - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p default` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -p default` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -### 2. project: `staging` dir: `.` workspace: `default` -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ var = "staging" -+ workspace = "default" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -p staging` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/workspace-parallel-yaml/exp-output-apply-all-production.txt.act b/server/controllers/events/testfixtures/test-repos/workspace-parallel-yaml/exp-output-apply-all-production.txt.act deleted file mode 100644 index b82518ed6b..0000000000 --- a/server/controllers/events/testfixtures/test-repos/workspace-parallel-yaml/exp-output-apply-all-production.txt.act +++ /dev/null @@ -1,34 +0,0 @@ -Ran Apply for 2 projects: - -1. dir: `production` workspace: `production` -1. dir: `staging` workspace: `staging` - -### 1. dir: `production` workspace: `production` -```diff -null_resource.this: Creating... -null_resource.this: Creation complete after *s [id=*******************] - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -workspace = "production" - -``` - ---- -### 2. dir: `staging` workspace: `staging` -```diff -null_resource.this: Creating... -null_resource.this: Creation complete after *s [id=*******************] - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -workspace = "staging" - -``` - ---- - diff --git a/server/controllers/events/testfixtures/test-repos/workspace-parallel-yaml/exp-output-apply-all-staging.txt.act b/server/controllers/events/testfixtures/test-repos/workspace-parallel-yaml/exp-output-apply-all-staging.txt.act deleted file mode 100644 index b82518ed6b..0000000000 --- a/server/controllers/events/testfixtures/test-repos/workspace-parallel-yaml/exp-output-apply-all-staging.txt.act +++ /dev/null @@ -1,34 +0,0 @@ -Ran Apply for 2 projects: - -1. dir: `production` workspace: `production` -1. dir: `staging` workspace: `staging` - -### 1. dir: `production` workspace: `production` -```diff -null_resource.this: Creating... -null_resource.this: Creation complete after *s [id=*******************] - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -workspace = "production" - -``` - ---- -### 2. dir: `staging` workspace: `staging` -```diff -null_resource.this: Creating... -null_resource.this: Creation complete after *s [id=*******************] - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -workspace = "staging" - -``` - ---- - diff --git a/server/controllers/events/testfixtures/test-repos/workspace-parallel-yaml/exp-output-autoplan-production.txt.act b/server/controllers/events/testfixtures/test-repos/workspace-parallel-yaml/exp-output-autoplan-production.txt.act deleted file mode 100644 index a8f4b695a0..0000000000 --- a/server/controllers/events/testfixtures/test-repos/workspace-parallel-yaml/exp-output-autoplan-production.txt.act +++ /dev/null @@ -1,73 +0,0 @@ -Ran Plan for 2 projects: - -1. dir: `production` workspace: `production` -1. dir: `staging` workspace: `staging` - -### 1. dir: `production` workspace: `production` -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.this will be created -+ resource "null_resource" "this" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ workspace = "production" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d production -w production` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d production -w production` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -### 2. dir: `staging` workspace: `staging` -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.this will be created -+ resource "null_resource" "this" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ workspace = "staging" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging -w staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging -w staging` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/controllers/events/testfixtures/test-repos/workspace-parallel-yaml/exp-output-autoplan-staging.txt.act b/server/controllers/events/testfixtures/test-repos/workspace-parallel-yaml/exp-output-autoplan-staging.txt.act deleted file mode 100644 index a8f4b695a0..0000000000 --- a/server/controllers/events/testfixtures/test-repos/workspace-parallel-yaml/exp-output-autoplan-staging.txt.act +++ /dev/null @@ -1,73 +0,0 @@ -Ran Plan for 2 projects: - -1. dir: `production` workspace: `production` -1. dir: `staging` workspace: `staging` - -### 1. dir: `production` workspace: `production` -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.this will be created -+ resource "null_resource" "this" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ workspace = "production" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d production -w production` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d production -w production` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -### 2. dir: `staging` workspace: `staging` -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.this will be created -+ resource "null_resource" "this" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ workspace = "staging" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging -w staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging -w staging` -
-Plan: 1 to add, 0 to change, 0 to destroy. - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/controllers/github_app_controller.go b/server/controllers/github_app_controller.go index d8808cd835..df8b24de30 100644 --- a/server/controllers/github_app_controller.go +++ b/server/controllers/github_app_controller.go @@ -18,6 +18,7 @@ type GithubAppController struct { GithubSetupComplete bool GithubHostname string GithubOrg string + GithubStatusName string } type githubWebhook struct { @@ -55,7 +56,7 @@ func (g *GithubAppController) ExchangeCode(w http.ResponseWriter, r *http.Reques g.Logger.Debug("Exchanging GitHub app code for app credentials") creds := &vcs.GithubAnonymousCredentials{} - client, err := vcs.NewGithubClient(g.GithubHostname, creds, g.Logger) + client, err := vcs.NewGithubClient(g.GithubHostname, creds, g.Logger, g.GithubStatusName) if err != nil { g.respond(w, logging.Error, http.StatusInternalServerError, "Failed to exchange code for github app: %s", err) return diff --git a/server/controllers/jobs_controller.go b/server/controllers/jobs_controller.go new file mode 100644 index 0000000000..872176c66d --- /dev/null +++ b/server/controllers/jobs_controller.go @@ -0,0 +1,144 @@ +package controllers + +import ( + "fmt" + "net/http" + "net/url" + + "strconv" + + "github.com/gorilla/mux" + "github.com/pkg/errors" + "github.com/runatlantis/atlantis/server/controllers/templates" + "github.com/runatlantis/atlantis/server/controllers/websocket" + "github.com/runatlantis/atlantis/server/core/db" + "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" +) + +type JobsController struct { + AtlantisVersion string + AtlantisURL *url.URL + Logger logging.SimpleLogging + ProjectJobsTemplate templates.TemplateWriter + ProjectJobsErrorTemplate templates.TemplateWriter + Db *db.BoltDB + WsMux *websocket.Multiplexor +} + +type ProjectInfoKeyGenerator struct{} + +func (g ProjectInfoKeyGenerator) Generate(r *http.Request) (string, error) { + projectInfo, err := newProjectInfo(r) + + if err != nil { + return "", errors.Wrap(err, "creating project info") + } + + return projectInfo.String(), nil +} + +type pullInfo struct { + org string + repo string + pull int +} + +func (p *pullInfo) String() string { + return fmt.Sprintf("%s/%s/%d", p.org, p.repo, p.pull) +} + +type projectInfo struct { + projectName string + workspace string + pullInfo +} + +func (p *projectInfo) String() string { + return fmt.Sprintf("%s/%s/%d/%s/%s", p.org, p.repo, p.pull, p.projectName, p.workspace) +} + +func newPullInfo(r *http.Request) (*pullInfo, error) { + org, ok := mux.Vars(r)["org"] + if !ok { + return nil, fmt.Errorf("Internal error: no org in route") + } + repo, ok := mux.Vars(r)["repo"] + if !ok { + return nil, fmt.Errorf("Internal error: no repo in route") + } + pull, ok := mux.Vars(r)["pull"] + if !ok { + return nil, fmt.Errorf("Internal error: no pull in route") + } + pullNum, err := strconv.Atoi(pull) + if err != nil { + return nil, err + } + + return &pullInfo{ + org: org, + repo: repo, + pull: pullNum, + }, nil +} + +// Gets the PR information from the HTTP request params +func newProjectInfo(r *http.Request) (*projectInfo, error) { + pullInfo, err := newPullInfo(r) + if err != nil { + return nil, err + } + + project, ok := mux.Vars(r)["project"] + if !ok { + return nil, 
fmt.Errorf("Internal error: no project in route") + } + + workspace, ok := mux.Vars(r)["workspace"] + if !ok { + return nil, fmt.Errorf("Internal error: no workspace in route") + } + + return &projectInfo{ + pullInfo: *pullInfo, + projectName: project, + workspace: workspace, + }, nil +} + +func (j *JobsController) GetProjectJobs(w http.ResponseWriter, r *http.Request) { + projectInfo, err := newProjectInfo(r) + if err != nil { + j.respond(w, logging.Error, http.StatusInternalServerError, err.Error()) + return + } + + viewData := templates.ProjectJobData{ + AtlantisVersion: j.AtlantisVersion, + ProjectPath: projectInfo.String(), + CleanedBasePath: j.AtlantisURL.Path, + ClearMsg: models.LogStreamingClearMsg, + } + + err = j.ProjectJobsTemplate.Execute(w, viewData) + if err != nil { + j.Logger.Err(err.Error()) + } +} + +func (j *JobsController) GetProjectJobsWS(w http.ResponseWriter, r *http.Request) { + err := j.WsMux.Handle(w, r) + + if err != nil { + j.respond(w, logging.Error, http.StatusInternalServerError, err.Error()) + return + } +} + +func (j *JobsController) respond(w http.ResponseWriter, lvl logging.LogLevel, responseCode int, format string, args ...interface{}) { + response := fmt.Sprintf(format, args...) + j.Logger.Log(lvl, response) + w.WriteHeader(responseCode) + fmt.Fprintln(w, response) +} diff --git a/server/controllers/status_controller_test.go b/server/controllers/status_controller_test.go index e809099e1a..bc51611558 100644 --- a/server/controllers/status_controller_test.go +++ b/server/controllers/status_controller_test.go @@ -3,7 +3,7 @@ package controllers_test import ( "bytes" "encoding/json" - "io/ioutil" + "io" "net/http" "net/http/httptest" "testing" @@ -26,7 +26,7 @@ func TestStatusController_Startup(t *testing.T) { d.Get(w, r) var result controllers.StatusResponse - body, err := ioutil.ReadAll(w.Result().Body) + body, err := io.ReadAll(w.Result().Body) Ok(t, err) Equals(t, 200, w.Result().StatusCode) err = json.Unmarshal(body, &result) @@ -49,7 +49,7 @@ func TestStatusController_InProgress(t *testing.T) { d.Get(w, r) var result controllers.StatusResponse - body, err := ioutil.ReadAll(w.Result().Body) + body, err := io.ReadAll(w.Result().Body) Ok(t, err) Equals(t, 200, w.Result().StatusCode) err = json.Unmarshal(body, &result) @@ -72,7 +72,7 @@ func TestStatusController_Shutdown(t *testing.T) { d.Get(w, r) var result controllers.StatusResponse - body, err := ioutil.ReadAll(w.Result().Body) + body, err := io.ReadAll(w.Result().Body) Ok(t, err) Equals(t, 200, w.Result().StatusCode) err = json.Unmarshal(body, &result) diff --git a/server/controllers/templates/web_templates.go b/server/controllers/templates/web_templates.go index 4c120d4558..db1cd5cd96 100644 --- a/server/controllers/templates/web_templates.go +++ b/server/controllers/templates/web_templates.go @@ -352,6 +352,168 @@ v{{ .AtlantisVersion }} `)) +// ProjectJobData holds the data needed to stream the current PR information +type ProjectJobData struct { + AtlantisVersion string + ProjectPath string + CleanedBasePath string + ClearMsg string +} + +var ProjectJobsTemplate = template.Must(template.New("blank.html.tmpl").Parse(` + + + + + atlantis + + + + + + + + + + + +
+ + + + + + + + + +`)) + +type ProjectJobsError struct { + AtlantisVersion string + ProjectPath string + CleanedBasePath string +} + +var ProjectJobsErrorTemplate = template.Must(template.New("blank.html.tmpl").Parse(` + + + + + atlantis + + + + + + + + + + + +
+ + + + + + + + + +`)) + // GithubSetupData holds the data for rendering the github app setup page type GithubSetupData struct { Target string diff --git a/server/controllers/websocket/mux.go b/server/controllers/websocket/mux.go new file mode 100644 index 0000000000..ccfbdf99f9 --- /dev/null +++ b/server/controllers/websocket/mux.go @@ -0,0 +1,63 @@ +package websocket + +import ( + "net/http" + + "github.com/gorilla/websocket" + "github.com/pkg/errors" + "github.com/runatlantis/atlantis/server/logging" +) + +// PartitionKeyGenerator generates partition keys for the multiplexor +type PartitionKeyGenerator interface { + Generate(r *http.Request) (string, error) +} + +// PartitionRegistry is the registry holding each partition +// and is responsible for registering/deregistering new buffers +type PartitionRegistry interface { + Register(key string, buffer chan string) + Deregister(key string, buffer chan string) +} + +// Multiplexor is responsible for handling the data transfer between the storage layer +// and the registry. Note this is still a WIP as right now the registry is assumed to handle +// everything. +type Multiplexor struct { + writer *Writer + keyGenerator PartitionKeyGenerator + registry PartitionRegistry +} + +func NewMultiplexor(log logging.SimpleLogging, keyGenerator PartitionKeyGenerator, registry PartitionRegistry) *Multiplexor { + upgrader := websocket.Upgrader{} + upgrader.CheckOrigin = func(r *http.Request) bool { return true } + return &Multiplexor{ + writer: &Writer{ + upgrader: upgrader, + log: log, + }, + keyGenerator: keyGenerator, + registry: registry, + } +} + +// Handle should be called for a given websocket request. It blocks +// while writing to the websocket until the buffer is closed. +func (m *Multiplexor) Handle(w http.ResponseWriter, r *http.Request) error { + key, err := m.keyGenerator.Generate(r) + + if err != nil { + return errors.Wrapf(err, "generating partition key") + } + + // Buffer size set to 1000 to ensure messages get queued. + // TODO: make buffer size configurable + buffer := make(chan string, 1000) + + // spinning up a goroutine for this since we are attempting to block on the read side. + go m.registry.Register(key, buffer) + defer m.registry.Deregister(key, buffer) + + return errors.Wrapf(m.writer.Write(w, r, buffer), "writing to ws %s", key) +} diff --git a/server/controllers/websocket/writer.go b/server/controllers/websocket/writer.go new file mode 100644 index 0000000000..1e19e50376 --- /dev/null +++ b/server/controllers/websocket/writer.go @@ -0,0 +1,68 @@ +package websocket + +import ( + "net/http" + + "github.com/gorilla/websocket" + "github.com/pkg/errors" + "github.com/runatlantis/atlantis/server/logging" +) + +func NewWriter(log logging.SimpleLogging) *Writer { + upgrader := websocket.Upgrader{} + upgrader.CheckOrigin = func(r *http.Request) bool { return true } + return &Writer{ + upgrader: upgrader, + log: log, + } +} + +type Writer struct { + upgrader websocket.Upgrader + log logging.SimpleLogging +} + +func (w *Writer) Write(rw http.ResponseWriter, r *http.Request, input chan string) error { + conn, err := w.upgrader.Upgrade(rw, r, nil) + + if err != nil { + return errors.Wrap(err, "upgrading websocket connection") + } + + conn.SetCloseHandler(func(code int, text string) error { + // Close the channnel after websocket connection closed. + // Will gracefully exit the ProjectCommandOutputHandler.Register() call and cleanup. + // is it good practice to close at the receiver? 
Probably not, we should figure out a better + // way to handle this case + close(input) + return nil + }) + + // Add a reader goroutine to listen for socket.close() events. + go w.setReadHandler(conn) + + // block on reading our input channel + for msg := range input { + if err := conn.WriteMessage(websocket.BinaryMessage, []byte("\r"+msg+"\n")); err != nil { + w.log.Warn("Failed to write ws message: %s", err) + return err + } + } + + return nil +} + +func (w *Writer) setReadHandler(c *websocket.Conn) { + for { + _, _, err := c.ReadMessage() + if err != nil { + // CloseGoingAway (1001) when a browser tab is closed. + // Expected behaviour since we have a CloseHandler(), log warning if not a CloseGoingAway + if websocket.IsUnexpectedCloseError(err, websocket.CloseGoingAway) { + w.log.Warn("Failed to read WS message: %s", err) + } + return + } + } + +} diff --git a/server/core/db/boltdb_test.go b/server/core/db/boltdb_test.go index ca41c6c00b..24b0342f70 100644 --- a/server/core/db/boltdb_test.go +++ b/server/core/db/boltdb_test.go @@ -14,7 +14,6 @@ package db_test import ( - "io/ioutil" "os" "testing" "time" @@ -769,7 +768,7 @@ func TestPullStatus_UpdateMerge(t *testing.T) { // newTestDB returns a TestDB using a temporary path. func newTestDB() (*bolt.DB, *db.BoltDB) { // Retrieve a temporary path. - f, err := ioutil.TempFile("", "") + f, err := os.CreateTemp("", "") if err != nil { panic(errors.Wrap(err, "failed to create temp file")) } diff --git a/server/core/locking/locking_test.go b/server/core/locking/locking_test.go index fd8ce1e328..316b770ba5 100644 --- a/server/core/locking/locking_test.go +++ b/server/core/locking/locking_test.go @@ -178,7 +178,7 @@ func TestGetLock_NoOpLocker(t *testing.T) { l := locking.NewNoOpLocker() lock, err := l.GetLock("owner/repo/path/workspace") Ok(t, err) - var expected *models.ProjectLock = nil + var expected *models.ProjectLock Equals(t, expected, lock) } diff --git a/server/core/runtime/apply_step_runner.go b/server/core/runtime/apply_step_runner.go index 3ce0d79ca9..9fe422da15 100644 --- a/server/core/runtime/apply_step_runner.go +++ b/server/core/runtime/apply_step_runner.go @@ -2,7 +2,6 @@ package runtime import ( "fmt" - "io/ioutil" "os" "path/filepath" "reflect" @@ -17,6 +16,7 @@ import ( // ApplyStepRunner runs `terraform apply`. type ApplyStepRunner struct { TerraformExecutor TerraformExec + DefaultTFVersion *version.Version CommitStatusUpdater StatusUpdater AsyncTFExec AsyncTFExec } @@ -27,7 +27,7 @@ func (a *ApplyStepRunner) Run(ctx models.ProjectCommandContext, extraArgs []stri } planPath := filepath.Join(path, GetPlanFilename(ctx.Workspace, ctx.ProjectName)) - contents, err := ioutil.ReadFile(planPath) + contents, err := os.ReadFile(planPath) if os.IsNotExist(err) { return "", fmt.Errorf("no plan found at path %q and workspace %q–did you run plan?", ctx.RepoRelDir, ctx.Workspace) } @@ -40,7 +40,7 @@ func (a *ApplyStepRunner) Run(ctx models.ProjectCommandContext, extraArgs []stri // TODO: Leverage PlanTypeStepRunnerDelegate here if IsRemotePlan(contents) { - args := append(append([]string{"apply", "-input=false", "-no-color"}, extraArgs...), ctx.EscapedCommentArgs...) + args := append(append([]string{"apply", "-input=false"}, extraArgs...), ctx.EscapedCommentArgs...) 
out, err = a.runRemoteApply(ctx, args, path, planPath, ctx.TerraformVersion, envs) if err == nil { out = a.cleanRemoteApplyOutput(out) @@ -48,8 +48,8 @@ func (a *ApplyStepRunner) Run(ctx models.ProjectCommandContext, extraArgs []stri } else { // NOTE: we need to quote the plan path because Bitbucket Server can // have spaces in its repo owner names which is part of the path. - args := append(append(append([]string{"apply", "-input=false", "-no-color"}, extraArgs...), ctx.EscapedCommentArgs...), fmt.Sprintf("%q", planPath)) - out, err = a.TerraformExecutor.RunCommandWithVersion(ctx.Log, path, args, envs, ctx.TerraformVersion, ctx.Workspace) + args := append(append(append([]string{"apply", "-input=false"}, extraArgs...), ctx.EscapedCommentArgs...), fmt.Sprintf("%q", planPath)) + out, err = a.TerraformExecutor.RunCommandWithVersion(ctx, path, args, envs, ctx.TerraformVersion, ctx.Workspace) } // If the apply was successful, delete the plan. @@ -118,7 +118,7 @@ func (a *ApplyStepRunner) runRemoteApply( // The planfile contents are needed to ensure that the plan didn't change // between plan and apply phases. - planfileBytes, err := ioutil.ReadFile(absPlanPath) + planfileBytes, err := os.ReadFile(absPlanPath) if err != nil { return "", errors.Wrap(err, "reading planfile") } @@ -132,7 +132,7 @@ func (a *ApplyStepRunner) runRemoteApply( // Start the async command execution. ctx.Log.Debug("starting async tf remote operation") - inCh, outCh := a.AsyncTFExec.RunCommandAsync(ctx.Log, filepath.Clean(path), applyArgs, envs, tfVersion, ctx.Workspace) + inCh, outCh := a.AsyncTFExec.RunCommandAsync(ctx, filepath.Clean(path), applyArgs, envs, tfVersion, ctx.Workspace) var lines []string nextLineIsRunURL := false var runURL string diff --git a/server/core/runtime/apply_step_runner_test.go b/server/core/runtime/apply_step_runner_test.go index 2130f4a816..f8df7187ee 100644 --- a/server/core/runtime/apply_step_runner_test.go +++ b/server/core/runtime/apply_step_runner_test.go @@ -1,7 +1,6 @@ package runtime_test import ( - "errors" "fmt" "io/ioutil" "os" @@ -12,6 +11,7 @@ import ( version "github.com/hashicorp/go-version" . "github.com/petergtz/pegomock" + "github.com/pkg/errors" "github.com/runatlantis/atlantis/server/core/runtime" "github.com/runatlantis/atlantis/server/core/terraform" "github.com/runatlantis/atlantis/server/core/terraform/mocks" @@ -20,7 +20,7 @@ import ( "github.com/runatlantis/atlantis/server/events/mocks/matchers" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/logging" - logging_matchers "github.com/runatlantis/atlantis/server/logging/mocks/matchers" + . "github.com/runatlantis/atlantis/testing" ) @@ -53,6 +53,13 @@ func TestRun_Success(t *testing.T) { defer cleanup() planPath := filepath.Join(tmpDir, "workspace.tfplan") err := ioutil.WriteFile(planPath, nil, 0600) + logger := logging.NewNoopLogger(t) + ctx := models.ProjectCommandContext{ + Log: logger, + Workspace: "workspace", + RepoRelDir: ".", + EscapedCommentArgs: []string{"comment", "args"}, + } Ok(t, err) RegisterMockTestingT(t) @@ -60,19 +67,13 @@ func TestRun_Success(t *testing.T) { o := runtime.ApplyStepRunner{ TerraformExecutor: terraform, } - logger := logging.NewNoopLogger(t) - When(terraform.RunCommandWithVersion(matchers.AnyPtrToLoggingSimpleLogger(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). 
+ When(terraform.RunCommandWithVersion(matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). ThenReturn("output", nil) - output, err := o.Run(models.ProjectCommandContext{ - Log: logger, - Workspace: "workspace", - RepoRelDir: ".", - EscapedCommentArgs: []string{"comment", "args"}, - }, []string{"extra", "args"}, tmpDir, map[string]string(nil)) + output, err := o.Run(ctx, []string{"extra", "args"}, tmpDir, map[string]string(nil)) Ok(t, err) Equals(t, "output", output) - terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, tmpDir, []string{"apply", "-input=false", "-no-color", "extra", "args", "comment", "args", fmt.Sprintf("%q", planPath)}, map[string]string(nil), nil, "workspace") + terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, tmpDir, []string{"apply", "-input=false", "extra", "args", "comment", "args", fmt.Sprintf("%q", planPath)}, map[string]string(nil), nil, "workspace") _, err = os.Stat(planPath) Assert(t, os.IsNotExist(err), "planfile should be deleted") } @@ -83,6 +84,15 @@ func TestRun_AppliesCorrectProjectPlan(t *testing.T) { defer cleanup() planPath := filepath.Join(tmpDir, "projectname-default.tfplan") err := ioutil.WriteFile(planPath, nil, 0600) + + logger := logging.NewNoopLogger(t) + ctx := models.ProjectCommandContext{ + Log: logger, + Workspace: "default", + RepoRelDir: ".", + ProjectName: "projectname", + EscapedCommentArgs: []string{"comment", "args"}, + } Ok(t, err) RegisterMockTestingT(t) @@ -90,20 +100,13 @@ func TestRun_AppliesCorrectProjectPlan(t *testing.T) { o := runtime.ApplyStepRunner{ TerraformExecutor: terraform, } - logger := logging.NewNoopLogger(t) - When(terraform.RunCommandWithVersion(matchers.AnyPtrToLoggingSimpleLogger(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). + When(terraform.RunCommandWithVersion(matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). 
ThenReturn("output", nil) - output, err := o.Run(models.ProjectCommandContext{ - Log: logger, - Workspace: "default", - RepoRelDir: ".", - ProjectName: "projectname", - EscapedCommentArgs: []string{"comment", "args"}, - }, []string{"extra", "args"}, tmpDir, map[string]string(nil)) + output, err := o.Run(ctx, []string{"extra", "args"}, tmpDir, map[string]string(nil)) Ok(t, err) Equals(t, "output", output) - terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, tmpDir, []string{"apply", "-input=false", "-no-color", "extra", "args", "comment", "args", fmt.Sprintf("%q", planPath)}, map[string]string(nil), nil, "default") + terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, tmpDir, []string{"apply", "-input=false", "extra", "args", "comment", "args", fmt.Sprintf("%q", planPath)}, map[string]string(nil), nil, "default") _, err = os.Stat(planPath) Assert(t, os.IsNotExist(err), "planfile should be deleted") } @@ -112,29 +115,31 @@ func TestRun_UsesConfiguredTFVersion(t *testing.T) { tmpDir, cleanup := TempDir(t) defer cleanup() planPath := filepath.Join(tmpDir, "workspace.tfplan") - err := ioutil.WriteFile(planPath, nil, 0600) + err := os.WriteFile(planPath, nil, 0600) Ok(t, err) - RegisterMockTestingT(t) - terraform := mocks.NewMockClient() - o := runtime.ApplyStepRunner{ - TerraformExecutor: terraform, - } logger := logging.NewNoopLogger(t) tfVersion, _ := version.NewVersion("0.11.0") - - When(terraform.RunCommandWithVersion(logging_matchers.AnyLoggingSimpleLogging(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). - ThenReturn("output", nil) - output, err := o.Run(models.ProjectCommandContext{ + ctx := models.ProjectCommandContext{ Workspace: "workspace", RepoRelDir: ".", EscapedCommentArgs: []string{"comment", "args"}, TerraformVersion: tfVersion, Log: logger, - }, []string{"extra", "args"}, tmpDir, map[string]string(nil)) + } + + RegisterMockTestingT(t) + terraform := mocks.NewMockClient() + o := runtime.ApplyStepRunner{ + TerraformExecutor: terraform, + } + + When(terraform.RunCommandWithVersion(matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). 
+ ThenReturn("output", nil) + output, err := o.Run(ctx, []string{"extra", "args"}, tmpDir, map[string]string(nil)) Ok(t, err) Equals(t, "output", output) - terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, tmpDir, []string{"apply", "-input=false", "-no-color", "extra", "args", "comment", "args", fmt.Sprintf("%q", planPath)}, map[string]string(nil), tfVersion, "workspace") + terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, tmpDir, []string{"apply", "-input=false", "extra", "args", "comment", "args", fmt.Sprintf("%q", planPath)}, map[string]string(nil), tfVersion, "workspace") _, err = os.Stat(planPath) Assert(t, os.IsNotExist(err), "planfile should be deleted") } @@ -197,7 +202,7 @@ func TestRun_UsingTarget(t *testing.T) { tmpDir, cleanup := TempDir(t) defer cleanup() planPath := filepath.Join(tmpDir, "workspace.tfplan") - err := ioutil.WriteFile(planPath, nil, 0600) + err := os.WriteFile(planPath, nil, 0600) Ok(t, err) terraform := mocks.NewMockClient() step := runtime.ApplyStepRunner{ @@ -236,7 +241,7 @@ Terraform will perform the following actions: Plan: 0 to add, 0 to change, 1 to destroy.` - err := ioutil.WriteFile(planPath, []byte("Atlantis: this plan was created by remote ops\n"+planFileContents), 0600) + err := os.WriteFile(planPath, []byte("Atlantis: this plan was created by remote ops\n"+planFileContents), 0600) Ok(t, err) RegisterMockTestingT(t) @@ -268,7 +273,7 @@ null_resource.dir2[1]: Destruction complete after 0s Apply complete! Resources: 0 added, 0 changed, 1 destroyed. `, output) - Equals(t, []string{"apply", "-input=false", "-no-color", "extra", "args", "comment", "args"}, tfExec.CalledArgs) + Equals(t, []string{"apply", "-input=false", "extra", "args", "comment", "args"}, tfExec.CalledArgs) _, err = os.Stat(planPath) Assert(t, os.IsNotExist(err), "planfile should be deleted") @@ -294,7 +299,7 @@ Terraform will perform the following actions: Plan: 0 to add, 0 to change, 1 to destroy.` - err := ioutil.WriteFile(planPath, []byte("Atlantis: this plan was created by remote ops\n"+planFileContents), 0600) + err := os.WriteFile(planPath, []byte("Atlantis: this plan was created by remote ops\n"+planFileContents), 0600) Ok(t, err) RegisterMockTestingT(t) @@ -365,7 +370,7 @@ type remoteApplyMock struct { } // RunCommandAsync fakes out running terraform async. 
-func (r *remoteApplyMock) RunCommandAsync(log logging.SimpleLogging, path string, args []string, envs map[string]string, v *version.Version, workspace string) (chan<- string, <-chan terraform.Line) { +func (r *remoteApplyMock) RunCommandAsync(ctx models.ProjectCommandContext, path string, args []string, envs map[string]string, v *version.Version, workspace string) (chan<- string, <-chan terraform.Line) { r.CalledArgs = args in := make(chan string) diff --git a/server/events/runtime/common/common.go b/server/core/runtime/common/common.go similarity index 100% rename from server/events/runtime/common/common.go rename to server/core/runtime/common/common.go diff --git a/server/events/runtime/common/common_test.go b/server/core/runtime/common/common_test.go similarity index 100% rename from server/events/runtime/common/common_test.go rename to server/core/runtime/common/common_test.go diff --git a/server/core/runtime/init_step_runner.go b/server/core/runtime/init_step_runner.go index 77e1daad47..55092b2451 100644 --- a/server/core/runtime/init_step_runner.go +++ b/server/core/runtime/init_step_runner.go @@ -5,8 +5,8 @@ import ( "path/filepath" version "github.com/hashicorp/go-version" + "github.com/runatlantis/atlantis/server/core/runtime/common" "github.com/runatlantis/atlantis/server/events/models" - "github.com/runatlantis/atlantis/server/events/runtime/common" ) // InitStep runs `terraform init`. @@ -49,8 +49,6 @@ func (i *InitStepRunner) Run(ctx models.ProjectCommandContext, extraArgs []strin terraformInitArgs = []string{} } - terraformInitArgs = append(terraformInitArgs, "-no-color") - if MustConstraint("< 0.14.0").Check(tfVersion) || !common.FileExists(terraformLockfilePath) { terraformInitArgs = append(terraformInitArgs, "-upgrade") } @@ -59,7 +57,7 @@ func (i *InitStepRunner) Run(ctx models.ProjectCommandContext, extraArgs []strin terraformInitCmd := append(terraformInitVerb, finalArgs...) - out, err := i.TerraformExecutor.RunCommandWithVersion(ctx.Log, path, terraformInitCmd, envs, tfVersion, ctx.Workspace) + out, err := i.TerraformExecutor.RunCommandWithVersion(ctx, path, terraformInitCmd, envs, tfVersion, ctx.Workspace) // Only include the init output if there was an error. Otherwise it's // unnecessary and lengthens the comment. if err != nil { diff --git a/server/core/runtime/init_step_runner_test.go b/server/core/runtime/init_step_runner_test.go index 496f13f7a7..a47c5da094 100644 --- a/server/core/runtime/init_step_runner_test.go +++ b/server/core/runtime/init_step_runner_test.go @@ -1,7 +1,7 @@ package runtime_test import ( - "io/ioutil" + "os" "os/exec" "path/filepath" "strings" @@ -14,9 +14,9 @@ import ( "github.com/runatlantis/atlantis/server/core/runtime" "github.com/runatlantis/atlantis/server/core/terraform/mocks" matchers2 "github.com/runatlantis/atlantis/server/core/terraform/mocks/matchers" + "github.com/runatlantis/atlantis/server/events/mocks/matchers" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/logging" - logging_matchers "github.com/runatlantis/atlantis/server/logging/mocks/matchers" . 
"github.com/runatlantis/atlantis/testing" ) @@ -49,30 +49,31 @@ func TestRun_UsesGetOrInitForRightVersion(t *testing.T) { terraform := mocks.NewMockClient() logger := logging.NewNoopLogger(t) + ctx := models.ProjectCommandContext{ + Workspace: "workspace", + RepoRelDir: ".", + Log: logger, + } tfVersion, _ := version.NewVersion(c.version) iso := runtime.InitStepRunner{ TerraformExecutor: terraform, DefaultTFVersion: tfVersion, } - When(terraform.RunCommandWithVersion(logging_matchers.AnyLoggingSimpleLogging(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). + When(terraform.RunCommandWithVersion(matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). ThenReturn("output", nil) - output, err := iso.Run(models.ProjectCommandContext{ - Workspace: "workspace", - RepoRelDir: ".", - Log: logger, - }, []string{"extra", "args"}, "/path", map[string]string(nil)) + output, err := iso.Run(ctx, []string{"extra", "args"}, "/path", map[string]string(nil)) Ok(t, err) // When there is no error, should not return init output to PR. Equals(t, "", output) // If using init then we specify -input=false but not for get. - expArgs := []string{c.expCmd, "-input=false", "-no-color", "-upgrade", "extra", "args"} + expArgs := []string{c.expCmd, "-input=false", "-upgrade", "extra", "args"} if c.expCmd == "get" { - expArgs = []string{c.expCmd, "-no-color", "-upgrade", "extra", "args"} + expArgs = []string{c.expCmd, "-upgrade", "extra", "args"} } - terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, "/path", expArgs, map[string]string(nil), tfVersion, "workspace") + terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, "/path", expArgs, map[string]string(nil), tfVersion, "workspace") }) } } @@ -82,7 +83,7 @@ func TestRun_ShowInitOutputOnError(t *testing.T) { RegisterMockTestingT(t) tfClient := mocks.NewMockClient() logger := logging.NewNoopLogger(t) - When(tfClient.RunCommandWithVersion(logging_matchers.AnyLoggingSimpleLogging(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). + When(tfClient.RunCommandWithVersion(matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). ThenReturn("output", errors.New("error")) tfVersion, _ := version.NewVersion("0.11.0") @@ -106,36 +107,37 @@ func TestRun_InitOmitsUpgradeFlagIfLockFileTracked(t *testing.T) { defer cleanup() lockFilePath := filepath.Join(repoDir, ".terraform.lock.hcl") - err := ioutil.WriteFile(lockFilePath, nil, 0600) + err := os.WriteFile(lockFilePath, nil, 0600) Ok(t, err) // commit lock file runCmd(t, repoDir, "git", "add", ".terraform.lock.hcl") runCmd(t, repoDir, "git", "commit", "-m", "add .terraform.lock.hcl") + logger := logging.NewNoopLogger(t) + ctx := models.ProjectCommandContext{ + Workspace: "workspace", + RepoRelDir: ".", + Log: logger, + } + RegisterMockTestingT(t) terraform := mocks.NewMockClient() - logger := logging.NewNoopLogger(t) - tfVersion, _ := version.NewVersion("0.14.0") iso := runtime.InitStepRunner{ TerraformExecutor: terraform, DefaultTFVersion: tfVersion, } - When(terraform.RunCommandWithVersion(logging_matchers.AnyLoggingSimpleLogging(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). 
+ When(terraform.RunCommandWithVersion(matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). ThenReturn("output", nil) - output, err := iso.Run(models.ProjectCommandContext{ - Workspace: "workspace", - RepoRelDir: ".", - Log: logger, - }, []string{"extra", "args"}, repoDir, map[string]string(nil)) + output, err := iso.Run(ctx, []string{"extra", "args"}, repoDir, map[string]string(nil)) Ok(t, err) // When there is no error, should not return init output to PR. Equals(t, "", output) - expectedArgs := []string{"init", "-input=false", "-no-color", "extra", "args"} - terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, repoDir, expectedArgs, map[string]string(nil), tfVersion, "workspace") + expectedArgs := []string{"init", "-input=false", "extra", "args"} + terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, repoDir, expectedArgs, map[string]string(nil), tfVersion, "workspace") } func TestRun_InitKeepsUpgradeFlagIfLockFileNotPresent(t *testing.T) { @@ -144,61 +146,62 @@ func TestRun_InitKeepsUpgradeFlagIfLockFileNotPresent(t *testing.T) { RegisterMockTestingT(t) terraform := mocks.NewMockClient() - logger := logging.NewNoopLogger(t) + ctx := models.ProjectCommandContext{ + Workspace: "workspace", + RepoRelDir: ".", + Log: logger, + } tfVersion, _ := version.NewVersion("0.14.0") iso := runtime.InitStepRunner{ TerraformExecutor: terraform, DefaultTFVersion: tfVersion, } - When(terraform.RunCommandWithVersion(logging_matchers.AnyLoggingSimpleLogging(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). + When(terraform.RunCommandWithVersion(matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). ThenReturn("output", nil) - output, err := iso.Run(models.ProjectCommandContext{ - Workspace: "workspace", - RepoRelDir: ".", - Log: logger, - }, []string{"extra", "args"}, tmpDir, map[string]string(nil)) + output, err := iso.Run(ctx, []string{"extra", "args"}, tmpDir, map[string]string(nil)) Ok(t, err) // When there is no error, should not return init output to PR. 
Equals(t, "", output) - expectedArgs := []string{"init", "-input=false", "-no-color", "-upgrade", "extra", "args"} - terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, tmpDir, expectedArgs, map[string]string(nil), tfVersion, "workspace") + expectedArgs := []string{"init", "-input=false", "-upgrade", "extra", "args"} + terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, tmpDir, expectedArgs, map[string]string(nil), tfVersion, "workspace") } func TestRun_InitKeepUpgradeFlagIfLockFilePresentAndTFLessThanPoint14(t *testing.T) { tmpDir, cleanup := TempDir(t) defer cleanup() lockFilePath := filepath.Join(tmpDir, ".terraform.lock.hcl") - err := ioutil.WriteFile(lockFilePath, nil, 0600) + err := os.WriteFile(lockFilePath, nil, 0600) Ok(t, err) RegisterMockTestingT(t) terraform := mocks.NewMockClient() logger := logging.NewNoopLogger(t) + ctx := models.ProjectCommandContext{ + Workspace: "workspace", + RepoRelDir: ".", + Log: logger, + } tfVersion, _ := version.NewVersion("0.13.0") iso := runtime.InitStepRunner{ TerraformExecutor: terraform, DefaultTFVersion: tfVersion, } - When(terraform.RunCommandWithVersion(logging_matchers.AnyLoggingSimpleLogging(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). + When(terraform.RunCommandWithVersion(matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). ThenReturn("output", nil) - output, err := iso.Run(models.ProjectCommandContext{ - Workspace: "workspace", - RepoRelDir: ".", - Log: logger, - }, []string{"extra", "args"}, tmpDir, map[string]string(nil)) + output, err := iso.Run(ctx, []string{"extra", "args"}, tmpDir, map[string]string(nil)) Ok(t, err) // When there is no error, should not return init output to PR. 
Equals(t, "", output) - expectedArgs := []string{"init", "-input=false", "-no-color", "-upgrade", "extra", "args"} - terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, tmpDir, expectedArgs, map[string]string(nil), tfVersion, "workspace") + expectedArgs := []string{"init", "-input=false", "-upgrade", "extra", "args"} + terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, tmpDir, expectedArgs, map[string]string(nil), tfVersion, "workspace") } func TestRun_InitExtraArgsDeDupe(t *testing.T) { @@ -211,32 +214,32 @@ func TestRun_InitExtraArgsDeDupe(t *testing.T) { { "No extra args", []string{}, - []string{"init", "-input=false", "-no-color", "-upgrade"}, + []string{"init", "-input=false", "-upgrade"}, }, { "Override -upgrade", []string{"-upgrade=false"}, - []string{"init", "-input=false", "-no-color", "-upgrade=false"}, + []string{"init", "-input=false", "-upgrade=false"}, }, { "Override -input", []string{"-input=true"}, - []string{"init", "-input=true", "-no-color", "-upgrade"}, + []string{"init", "-input=true", "-upgrade"}, }, { "Override -input and -upgrade", []string{"-input=true", "-upgrade=false"}, - []string{"init", "-input=true", "-no-color", "-upgrade=false"}, + []string{"init", "-input=true", "-upgrade=false"}, }, { "Non duplicate extra args", []string{"extra", "args"}, - []string{"init", "-input=false", "-no-color", "-upgrade", "extra", "args"}, + []string{"init", "-input=false", "-upgrade", "extra", "args"}, }, { "Override upgrade with extra args", []string{"extra", "args", "-upgrade=false"}, - []string{"init", "-input=false", "-no-color", "-upgrade=false", "extra", "args"}, + []string{"init", "-input=false", "-upgrade=false", "extra", "args"}, }, } @@ -245,25 +248,26 @@ func TestRun_InitExtraArgsDeDupe(t *testing.T) { terraform := mocks.NewMockClient() logger := logging.NewNoopLogger(t) + ctx := models.ProjectCommandContext{ + Workspace: "workspace", + RepoRelDir: ".", + Log: logger, + } tfVersion, _ := version.NewVersion("0.10.0") iso := runtime.InitStepRunner{ TerraformExecutor: terraform, DefaultTFVersion: tfVersion, } - When(terraform.RunCommandWithVersion(logging_matchers.AnyLoggingSimpleLogging(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). + When(terraform.RunCommandWithVersion(matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). ThenReturn("output", nil) - output, err := iso.Run(models.ProjectCommandContext{ - Workspace: "workspace", - RepoRelDir: ".", - Log: logger, - }, c.extraArgs, "/path", map[string]string(nil)) + output, err := iso.Run(ctx, c.extraArgs, "/path", map[string]string(nil)) Ok(t, err) // When there is no error, should not return init output to PR. 
Equals(t, "", output) - terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, "/path", c.expectedArgs, map[string]string(nil), tfVersion, "workspace") + terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, "/path", c.expectedArgs, map[string]string(nil), tfVersion, "workspace") }) } } @@ -274,7 +278,7 @@ func TestRun_InitDeletesLockFileIfPresentAndNotTracked(t *testing.T) { defer cleanup() lockFilePath := filepath.Join(repoDir, ".terraform.lock.hcl") - err := ioutil.WriteFile(lockFilePath, nil, 0600) + err := os.WriteFile(lockFilePath, nil, 0600) Ok(t, err) RegisterMockTestingT(t) @@ -287,20 +291,21 @@ func TestRun_InitDeletesLockFileIfPresentAndNotTracked(t *testing.T) { TerraformExecutor: terraform, DefaultTFVersion: tfVersion, } - When(terraform.RunCommandWithVersion(logging_matchers.AnyLoggingSimpleLogging(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). + When(terraform.RunCommandWithVersion(matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). ThenReturn("output", nil) - output, err := iso.Run(models.ProjectCommandContext{ + ctx := models.ProjectCommandContext{ Workspace: "workspace", RepoRelDir: ".", Log: logger, - }, []string{"extra", "args"}, repoDir, map[string]string(nil)) + } + output, err := iso.Run(ctx, []string{"extra", "args"}, repoDir, map[string]string(nil)) Ok(t, err) // When there is no error, should not return init output to PR. Equals(t, "", output) - expectedArgs := []string{"init", "-input=false", "-no-color", "-upgrade", "extra", "args"} - terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, repoDir, expectedArgs, map[string]string(nil), tfVersion, "workspace") + expectedArgs := []string{"init", "-input=false", "-upgrade", "extra", "args"} + terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, repoDir, expectedArgs, map[string]string(nil), tfVersion, "workspace") } func runCmd(t *testing.T, dir string, name string, args ...string) string { diff --git a/server/core/runtime/mocks/matchers/models_approvalstatus.go b/server/core/runtime/mocks/matchers/models_approvalstatus.go new file mode 100644 index 0000000000..01b76dd968 --- /dev/null +++ b/server/core/runtime/mocks/matchers/models_approvalstatus.go @@ -0,0 +1,33 @@ +// Code generated by pegomock. DO NOT EDIT. 
+package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" + + models "github.com/runatlantis/atlantis/server/events/models" +) + +func AnyModelsApprovalStatus() models.ApprovalStatus { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.ApprovalStatus))(nil)).Elem())) + var nullValue models.ApprovalStatus + return nullValue +} + +func EqModelsApprovalStatus(value models.ApprovalStatus) models.ApprovalStatus { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue models.ApprovalStatus + return nullValue +} + +func NotEqModelsApprovalStatus(value models.ApprovalStatus) models.ApprovalStatus { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue models.ApprovalStatus + return nullValue +} + +func ModelsApprovalStatusThat(matcher pegomock.ArgumentMatcher) models.ApprovalStatus { + pegomock.RegisterMatcher(matcher) + var nullValue models.ApprovalStatus + return nullValue +} diff --git a/server/core/runtime/mocks/mock_pull_approved_checker.go b/server/core/runtime/mocks/mock_pull_approved_checker.go index 9c07901003..a71dd3506c 100644 --- a/server/core/runtime/mocks/mock_pull_approved_checker.go +++ b/server/core/runtime/mocks/mock_pull_approved_checker.go @@ -25,17 +25,17 @@ func NewMockPullApprovedChecker(options ...pegomock.Option) *MockPullApprovedChe func (mock *MockPullApprovedChecker) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockPullApprovedChecker) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockPullApprovedChecker) PullIsApproved(baseRepo models.Repo, pull models.PullRequest) (bool, error) { +func (mock *MockPullApprovedChecker) PullIsApproved(_param0 models.Repo, _param1 models.PullRequest) (models.ApprovalStatus, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockPullApprovedChecker().") } - params := []pegomock.Param{baseRepo, pull} - result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsApproved", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 bool + params := []pegomock.Param{_param0, _param1} + result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsApproved", params, []reflect.Type{reflect.TypeOf((*models.ApprovalStatus)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 models.ApprovalStatus var ret1 error if len(result) != 0 { if result[0] != nil { - ret0 = result[0].(bool) + ret0 = result[0].(models.ApprovalStatus) } if result[1] != nil { ret1 = result[1].(error) @@ -81,8 +81,8 @@ type VerifierMockPullApprovedChecker struct { timeout time.Duration } -func (verifier *VerifierMockPullApprovedChecker) PullIsApproved(baseRepo models.Repo, pull models.PullRequest) *MockPullApprovedChecker_PullIsApproved_OngoingVerification { - params := []pegomock.Param{baseRepo, pull} +func (verifier *VerifierMockPullApprovedChecker) PullIsApproved(_param0 models.Repo, _param1 models.PullRequest) *MockPullApprovedChecker_PullIsApproved_OngoingVerification { + params := []pegomock.Param{_param0, _param1} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsApproved", params, verifier.timeout) return &MockPullApprovedChecker_PullIsApproved_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -93,8 +93,8 @@ type MockPullApprovedChecker_PullIsApproved_OngoingVerification struct { } func (c *MockPullApprovedChecker_PullIsApproved_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - baseRepo, pull := c.GetAllCapturedArguments() - return baseRepo[len(baseRepo)-1], pull[len(pull)-1] + _param0, _param1 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1] } func (c *MockPullApprovedChecker_PullIsApproved_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { diff --git a/server/core/runtime/plan_step_runner.go b/server/core/runtime/plan_step_runner.go index ea21139e11..f7dba9402f 100644 --- a/server/core/runtime/plan_step_runner.go +++ b/server/core/runtime/plan_step_runner.go @@ -2,7 +2,6 @@ package runtime import ( "fmt" - "io/ioutil" "os" "path/filepath" "regexp" @@ -46,7 +45,7 @@ func (p *PlanStepRunner) Run(ctx models.ProjectCommandContext, extraArgs []strin planFile := filepath.Join(path, GetPlanFilename(ctx.Workspace, ctx.ProjectName)) planCmd := p.buildPlanCmd(ctx, extraArgs, path, tfVersion, planFile) - output, err := p.TerraformExecutor.RunCommandWithVersion(ctx.Log, filepath.Clean(path), planCmd, envs, tfVersion, ctx.Workspace) + output, err := p.TerraformExecutor.RunCommandWithVersion(ctx, filepath.Clean(path), planCmd, envs, tfVersion, ctx.Workspace) if p.isRemoteOpsErr(output, err) { ctx.Log.Debug("detected that this project is using TFE remote ops") return p.remotePlan(ctx, extraArgs, path, tfVersion, planFile, envs) @@ -70,7 +69,7 @@ func (p *PlanStepRunner) isRemoteOpsErr(output string, err error) bool { // operations. 
func (p *PlanStepRunner) remotePlan(ctx models.ProjectCommandContext, extraArgs []string, path string, tfVersion *version.Version, planFile string, envs map[string]string) (string, error) { argList := [][]string{ - {"plan", "-input=false", "-refresh", "-no-color"}, + {"plan", "-input=false", "-refresh"}, extraArgs, ctx.EscapedCommentArgs, } @@ -92,7 +91,7 @@ func (p *PlanStepRunner) remotePlan(ctx models.ProjectCommandContext, extraArgs // We also prepend our own remote ops header to the file so during apply we // know this is a remote apply. - err = ioutil.WriteFile(planFile, []byte(remoteOpsHeader+planOutput), 0600) + err = os.WriteFile(planFile, []byte(remoteOpsHeader+planOutput), 0600) if err != nil { return output, errors.Wrap(err, "unable to create planfile for remote ops") } @@ -125,7 +124,7 @@ func (p *PlanStepRunner) switchWorkspace(ctx models.ProjectCommandContext, path // already in the right workspace then no need to switch. This will save us // about ten seconds. This command is only available in > 0.10. if !runningZeroPointNine { - workspaceShowOutput, err := p.TerraformExecutor.RunCommandWithVersion(ctx.Log, path, []string{workspaceCmd, "show"}, envs, tfVersion, ctx.Workspace) + workspaceShowOutput, err := p.TerraformExecutor.RunCommandWithVersion(ctx, path, []string{workspaceCmd, "show"}, envs, tfVersion, ctx.Workspace) if err != nil { return err } @@ -140,11 +139,11 @@ func (p *PlanStepRunner) switchWorkspace(ctx models.ProjectCommandContext, path // To do this we can either select and catch the error or use list and then // look for the workspace. Both commands take the same amount of time so // that's why we're running select here. - _, err := p.TerraformExecutor.RunCommandWithVersion(ctx.Log, path, []string{workspaceCmd, "select", "-no-color", ctx.Workspace}, envs, tfVersion, ctx.Workspace) + _, err := p.TerraformExecutor.RunCommandWithVersion(ctx, path, []string{workspaceCmd, "select", ctx.Workspace}, envs, tfVersion, ctx.Workspace) if err != nil { // If terraform workspace select fails we run terraform workspace // new to create a new workspace automatically. - out, err := p.TerraformExecutor.RunCommandWithVersion(ctx.Log, path, []string{workspaceCmd, "new", "-no-color", ctx.Workspace}, envs, tfVersion, ctx.Workspace) + out, err := p.TerraformExecutor.RunCommandWithVersion(ctx, path, []string{workspaceCmd, "new", ctx.Workspace}, envs, tfVersion, ctx.Workspace) if err != nil { return fmt.Errorf("%s: %s", err, out) } @@ -168,7 +167,7 @@ func (p *PlanStepRunner) buildPlanCmd(ctx models.ProjectCommandContext, extraArg argList := [][]string{ // NOTE: we need to quote the plan filename because Bitbucket Server can // have spaces in its repo owner names. - {"plan", "-input=false", "-refresh", "-no-color", "-out", fmt.Sprintf("%q", planFile)}, + {"plan", "-input=false", "-refresh", "-out", fmt.Sprintf("%q", planFile)}, tfVars, extraArgs, ctx.EscapedCommentArgs, @@ -253,7 +252,7 @@ func (p *PlanStepRunner) runRemotePlan( // Start the async command execution. 
ctx.Log.Debug("starting async tf remote operation") - _, outCh := p.AsyncTFExec.RunCommandAsync(ctx.Log, filepath.Clean(path), cmdArgs, envs, tfVersion, ctx.Workspace) + _, outCh := p.AsyncTFExec.RunCommandAsync(ctx, filepath.Clean(path), cmdArgs, envs, tfVersion, ctx.Workspace) var lines []string nextLineIsRunURL := false var runURL string diff --git a/server/core/runtime/plan_step_runner_test.go b/server/core/runtime/plan_step_runner_test.go index d16d531a30..1f609fc023 100644 --- a/server/core/runtime/plan_step_runner_test.go +++ b/server/core/runtime/plan_step_runner_test.go @@ -2,7 +2,6 @@ package runtime_test import ( "fmt" - "io/ioutil" "os" "path/filepath" "strings" @@ -20,7 +19,7 @@ import ( "github.com/runatlantis/atlantis/server/events/mocks/matchers" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/logging" - logging_matchers "github.com/runatlantis/atlantis/server/logging/mocks/matchers" + . "github.com/runatlantis/atlantis/testing" ) @@ -30,16 +29,10 @@ func TestRun_NoWorkspaceIn08(t *testing.T) { terraform := mocks.NewMockClient() tfVersion, _ := version.NewVersion("0.8") - logger := logging.NewNoopLogger(t) - workspace := "default" - s := runtime.PlanStepRunner{ - DefaultTFVersion: tfVersion, - TerraformExecutor: terraform, - } - When(terraform.RunCommandWithVersion(logging_matchers.AnyLoggingSimpleLogging(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). - ThenReturn("output", nil) - output, err := s.Run(models.ProjectCommandContext{ + workspace := "default" + logger := logging.NewNoopLogger(t) + ctx := models.ProjectCommandContext{ Log: logger, EscapedCommentArgs: []string{"comment", "args"}, Workspace: workspace, @@ -53,17 +46,24 @@ func TestRun_NoWorkspaceIn08(t *testing.T) { Owner: "owner", Name: "repo", }, - }, []string{"extra", "args"}, "/path", map[string]string(nil)) + } + s := runtime.PlanStepRunner{ + DefaultTFVersion: tfVersion, + TerraformExecutor: terraform, + } + + When(terraform.RunCommandWithVersion(matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). + ThenReturn("output", nil) + output, err := s.Run(ctx, []string{"extra", "args"}, "/path", map[string]string(nil)) Ok(t, err) Equals(t, "output", output) terraform.VerifyWasCalledOnce().RunCommandWithVersion( - logger, + ctx, "/path", []string{"plan", "-input=false", "-refresh", - "-no-color", "-out", "\"/path/default.tfplan\"", "-var", @@ -85,20 +85,18 @@ func TestRun_NoWorkspaceIn08(t *testing.T) { workspace) // Verify that no env or workspace commands were run - terraform.VerifyWasCalled(Never()).RunCommandWithVersion(logger, + terraform.VerifyWasCalled(Never()).RunCommandWithVersion(ctx, "/path", []string{"env", "select", - "-no-color", "workspace"}, map[string]string(nil), tfVersion, workspace) - terraform.VerifyWasCalled(Never()).RunCommandWithVersion(logger, + terraform.VerifyWasCalled(Never()).RunCommandWithVersion(ctx, "/path", []string{"workspace", "select", - "-no-color", "workspace"}, map[string]string(nil), tfVersion, @@ -119,7 +117,7 @@ func TestRun_ErrWorkspaceIn08(t *testing.T) { DefaultTFVersion: tfVersion, } - When(terraform.RunCommandWithVersion(logging_matchers.AnyLoggingSimpleLogging(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). 
+ When(terraform.RunCommandWithVersion(matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). ThenReturn("output", nil) _, err := s.Run(models.ProjectCommandContext{ Log: logger, @@ -161,15 +159,7 @@ func TestRun_SwitchesWorkspace(t *testing.T) { tfVersion, _ := version.NewVersion(c.tfVersion) logger := logging.NewNoopLogger(t) - - s := runtime.PlanStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - - When(terraform.RunCommandWithVersion(logging_matchers.AnyLoggingSimpleLogging(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). - ThenReturn("output", nil) - output, err := s.Run(models.ProjectCommandContext{ + ctx := models.ProjectCommandContext{ Log: logger, Workspace: "workspace", RepoRelDir: ".", @@ -183,26 +173,32 @@ func TestRun_SwitchesWorkspace(t *testing.T) { Owner: "owner", Name: "repo", }, - }, []string{"extra", "args"}, "/path", map[string]string(nil)) + } + s := runtime.PlanStepRunner{ + TerraformExecutor: terraform, + DefaultTFVersion: tfVersion, + } + + When(terraform.RunCommandWithVersion(matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). + ThenReturn("output", nil) + output, err := s.Run(ctx, []string{"extra", "args"}, "/path", map[string]string(nil)) Ok(t, err) Equals(t, "output", output) // Verify that env select was called as well as plan. - terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, + terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, "/path", []string{c.expWorkspaceCmd, "select", - "-no-color", "workspace"}, map[string]string(nil), tfVersion, "workspace") - terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, + terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, "/path", []string{"plan", "-input=false", "-refresh", - "-no-color", "-out", "\"/path/workspace.tfplan\"", "-var", @@ -257,6 +253,21 @@ func TestRun_CreatesWorkspace(t *testing.T) { terraform := mocks.NewMockClient() tfVersion, _ := version.NewVersion(c.tfVersion) logger := logging.NewNoopLogger(t) + ctx := models.ProjectCommandContext{ + Log: logger, + Workspace: "workspace", + RepoRelDir: ".", + User: models.User{Username: "username"}, + EscapedCommentArgs: []string{"comment", "args"}, + Pull: models.PullRequest{ + Num: 2, + }, + BaseRepo: models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + }, + } s := runtime.PlanStepRunner{ TerraformExecutor: terraform, DefaultTFVersion: tfVersion, @@ -264,15 +275,14 @@ func TestRun_CreatesWorkspace(t *testing.T) { // Ensure that we actually try to switch workspaces by making the // output of `workspace show` to be a different name. 
- When(terraform.RunCommandWithVersion(logger, "/path", []string{"workspace", "show"}, map[string]string(nil), tfVersion, "workspace")).ThenReturn("diffworkspace\n", nil) + When(terraform.RunCommandWithVersion(ctx, "/path", []string{"workspace", "show"}, map[string]string(nil), tfVersion, "workspace")).ThenReturn("diffworkspace\n", nil) - expWorkspaceArgs := []string{c.expWorkspaceCommand, "select", "-no-color", "workspace"} - When(terraform.RunCommandWithVersion(logger, "/path", expWorkspaceArgs, map[string]string(nil), tfVersion, "workspace")).ThenReturn("", errors.New("workspace does not exist")) + expWorkspaceArgs := []string{c.expWorkspaceCommand, "select", "workspace"} + When(terraform.RunCommandWithVersion(ctx, "/path", expWorkspaceArgs, map[string]string(nil), tfVersion, "workspace")).ThenReturn("", errors.New("workspace does not exist")) expPlanArgs := []string{"plan", "-input=false", "-refresh", - "-no-color", "-out", "\"/path/workspace.tfplan\"", "-var", @@ -289,29 +299,15 @@ func TestRun_CreatesWorkspace(t *testing.T) { "args", "comment", "args"} - When(terraform.RunCommandWithVersion(logger, "/path", expPlanArgs, map[string]string(nil), tfVersion, "workspace")).ThenReturn("output", nil) + When(terraform.RunCommandWithVersion(ctx, "/path", expPlanArgs, map[string]string(nil), tfVersion, "workspace")).ThenReturn("output", nil) - output, err := s.Run(models.ProjectCommandContext{ - Log: logger, - Workspace: "workspace", - RepoRelDir: ".", - User: models.User{Username: "username"}, - EscapedCommentArgs: []string{"comment", "args"}, - Pull: models.PullRequest{ - Num: 2, - }, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - }, - }, []string{"extra", "args"}, "/path", map[string]string(nil)) + output, err := s.Run(ctx, []string{"extra", "args"}, "/path", map[string]string(nil)) Ok(t, err) Equals(t, "output", output) // Verify that env select was called as well as plan. 
- terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, "/path", expWorkspaceArgs, map[string]string(nil), tfVersion, "workspace") - terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, "/path", expPlanArgs, map[string]string(nil), tfVersion, "workspace") + terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, "/path", expWorkspaceArgs, map[string]string(nil), tfVersion, "workspace") + terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, "/path", expPlanArgs, map[string]string(nil), tfVersion, "workspace") }) } } @@ -323,16 +319,30 @@ func TestRun_NoWorkspaceSwitchIfNotNecessary(t *testing.T) { terraform := mocks.NewMockClient() tfVersion, _ := version.NewVersion("0.10.0") logger := logging.NewNoopLogger(t) + ctx := models.ProjectCommandContext{ + Log: logger, + Workspace: "workspace", + RepoRelDir: ".", + User: models.User{Username: "username"}, + EscapedCommentArgs: []string{"comment", "args"}, + Pull: models.PullRequest{ + Num: 2, + }, + BaseRepo: models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + }, + } s := runtime.PlanStepRunner{ TerraformExecutor: terraform, DefaultTFVersion: tfVersion, } - When(terraform.RunCommandWithVersion(logger, "/path", []string{"workspace", "show"}, map[string]string(nil), tfVersion, "workspace")).ThenReturn("workspace\n", nil) + When(terraform.RunCommandWithVersion(ctx, "/path", []string{"workspace", "show"}, map[string]string(nil), tfVersion, "workspace")).ThenReturn("workspace\n", nil) expPlanArgs := []string{"plan", "-input=false", "-refresh", - "-no-color", "-out", "\"/path/workspace.tfplan\"", "-var", @@ -349,30 +359,16 @@ func TestRun_NoWorkspaceSwitchIfNotNecessary(t *testing.T) { "args", "comment", "args"} - When(terraform.RunCommandWithVersion(logger, "/path", expPlanArgs, map[string]string(nil), tfVersion, "workspace")).ThenReturn("output", nil) + When(terraform.RunCommandWithVersion(ctx, "/path", expPlanArgs, map[string]string(nil), tfVersion, "workspace")).ThenReturn("output", nil) - output, err := s.Run(models.ProjectCommandContext{ - Log: logger, - Workspace: "workspace", - RepoRelDir: ".", - User: models.User{Username: "username"}, - EscapedCommentArgs: []string{"comment", "args"}, - Pull: models.PullRequest{ - Num: 2, - }, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - }, - }, []string{"extra", "args"}, "/path", map[string]string(nil)) + output, err := s.Run(ctx, []string{"extra", "args"}, "/path", map[string]string(nil)) Ok(t, err) Equals(t, "output", output) - terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, "/path", expPlanArgs, map[string]string(nil), tfVersion, "workspace") + terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, "/path", expPlanArgs, map[string]string(nil), tfVersion, "workspace") // Verify that workspace select was never called. 
- terraform.VerifyWasCalled(Never()).RunCommandWithVersion(logger, "/path", []string{"workspace", "select", "-no-color", "workspace"}, map[string]string(nil), tfVersion, "workspace") + terraform.VerifyWasCalled(Never()).RunCommandWithVersion(ctx, "/path", []string{"workspace", "select", "workspace"}, map[string]string(nil), tfVersion, "workspace") } func TestRun_AddsEnvVarFile(t *testing.T) { @@ -386,7 +382,7 @@ func TestRun_AddsEnvVarFile(t *testing.T) { err := os.MkdirAll(filepath.Join(tmpDir, "env"), 0700) Ok(t, err) envVarsFile := filepath.Join(tmpDir, "env/workspace.tfvars") - err = ioutil.WriteFile(envVarsFile, nil, 0600) + err = os.WriteFile(envVarsFile, nil, 0600) Ok(t, err) // Using version >= 0.10 here so we don't expect any env commands. @@ -400,7 +396,6 @@ func TestRun_AddsEnvVarFile(t *testing.T) { expPlanArgs := []string{"plan", "-input=false", "-refresh", - "-no-color", "-out", fmt.Sprintf("%q", filepath.Join(tmpDir, "workspace.tfplan")), "-var", @@ -420,9 +415,7 @@ func TestRun_AddsEnvVarFile(t *testing.T) { "-var-file", envVarsFile, } - When(terraform.RunCommandWithVersion(logger, tmpDir, expPlanArgs, map[string]string(nil), tfVersion, "workspace")).ThenReturn("output", nil) - - output, err := s.Run(models.ProjectCommandContext{ + ctx := models.ProjectCommandContext{ Log: logger, Workspace: "workspace", RepoRelDir: ".", @@ -436,12 +429,15 @@ func TestRun_AddsEnvVarFile(t *testing.T) { Owner: "owner", Name: "repo", }, - }, []string{"extra", "args"}, tmpDir, map[string]string(nil)) + } + When(terraform.RunCommandWithVersion(ctx, tmpDir, expPlanArgs, map[string]string(nil), tfVersion, "workspace")).ThenReturn("output", nil) + + output, err := s.Run(ctx, []string{"extra", "args"}, tmpDir, map[string]string(nil)) Ok(t, err) // Verify that env select was never called since we're in version >= 0.10 - terraform.VerifyWasCalled(Never()).RunCommandWithVersion(logger, tmpDir, []string{"env", "select", "-no-color", "workspace"}, map[string]string(nil), tfVersion, "workspace") - terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, tmpDir, expPlanArgs, map[string]string(nil), tfVersion, "workspace") + terraform.VerifyWasCalled(Never()).RunCommandWithVersion(ctx, tmpDir, []string{"env", "select", "workspace"}, map[string]string(nil), tfVersion, "workspace") + terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, tmpDir, expPlanArgs, map[string]string(nil), tfVersion, "workspace") Equals(t, "output", output) } @@ -456,12 +452,27 @@ func TestRun_UsesDiffPathForProject(t *testing.T) { TerraformExecutor: terraform, DefaultTFVersion: tfVersion, } - When(terraform.RunCommandWithVersion(logger, "/path", []string{"workspace", "show"}, map[string]string(nil), tfVersion, "workspace")).ThenReturn("workspace\n", nil) + ctx := models.ProjectCommandContext{ + Log: logger, + Workspace: "default", + RepoRelDir: ".", + User: models.User{Username: "username"}, + EscapedCommentArgs: []string{"comment", "args"}, + ProjectName: "projectname", + Pull: models.PullRequest{ + Num: 2, + }, + BaseRepo: models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + }, + } + When(terraform.RunCommandWithVersion(ctx, "/path", []string{"workspace", "show"}, map[string]string(nil), tfVersion, "workspace")).ThenReturn("workspace\n", nil) expPlanArgs := []string{"plan", "-input=false", "-refresh", - "-no-color", "-out", "\"/path/projectname-default.tfplan\"", "-var", @@ -479,24 +490,9 @@ func TestRun_UsesDiffPathForProject(t *testing.T) { "comment", "args", } - 
When(terraform.RunCommandWithVersion(logger, "/path", expPlanArgs, map[string]string(nil), tfVersion, "default")).ThenReturn("output", nil) + When(terraform.RunCommandWithVersion(ctx, "/path", expPlanArgs, map[string]string(nil), tfVersion, "default")).ThenReturn("output", nil) - output, err := s.Run(models.ProjectCommandContext{ - Log: logger, - Workspace: "default", - RepoRelDir: ".", - User: models.User{Username: "username"}, - EscapedCommentArgs: []string{"comment", "args"}, - ProjectName: "projectname", - Pull: models.PullRequest{ - Num: 2, - }, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - }, - }, []string{"extra", "args"}, "/path", map[string]string(nil)) + output, err := s.Run(ctx, []string{"extra", "args"}, "/path", map[string]string(nil)) Ok(t, err) Equals(t, "output", output) } @@ -537,7 +533,7 @@ Terraform will perform the following actions: DefaultTFVersion: tfVersion, } When(terraform.RunCommandWithVersion( - matchers.AnyPtrToLoggingSimpleLogger(), + matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), @@ -591,7 +587,7 @@ func TestRun_OutputOnErr(t *testing.T) { expOutput := "expected output" expErrMsg := "error!" When(terraform.RunCommandWithVersion( - matchers.AnyPtrToLoggingSimpleLogger(), + matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), @@ -624,7 +620,6 @@ func TestRun_NoOptionalVarsIn012(t *testing.T) { "plan", "-input=false", "-refresh", - "-no-color", "-out", fmt.Sprintf("%q", "/path/default.tfplan"), "extra", @@ -651,7 +646,7 @@ func TestRun_NoOptionalVarsIn012(t *testing.T) { t.Run(c.name, func(t *testing.T) { terraform := mocks.NewMockClient() When(terraform.RunCommandWithVersion( - matchers.AnyPtrToLoggingSimpleLogger(), + matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), @@ -663,8 +658,7 @@ func TestRun_NoOptionalVarsIn012(t *testing.T) { TerraformExecutor: terraform, DefaultTFVersion: tfVersion, } - - output, err := s.Run(models.ProjectCommandContext{ + ctx := models.ProjectCommandContext{ Workspace: "default", RepoRelDir: ".", User: models.User{Username: "username"}, @@ -677,11 +671,13 @@ func TestRun_NoOptionalVarsIn012(t *testing.T) { Owner: "owner", Name: "repo", }, - }, []string{"extra", "args"}, "/path", map[string]string(nil)) + } + + output, err := s.Run(ctx, []string{"extra", "args"}, "/path", map[string]string(nil)) Ok(t, err) Equals(t, "output", output) - terraform.VerifyWasCalledOnce().RunCommandWithVersion(nil, "/path", expPlanArgs, map[string]string(nil), tfVersion, "default") + terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, "/path", expPlanArgs, map[string]string(nil), tfVersion, "default") }) } @@ -707,13 +703,28 @@ locally at this time. t.Run(name, func(t *testing.T) { logger := logging.NewNoopLogger(t) - + // Now that mocking is set up, we're ready to run the plan. 
+ ctx := models.ProjectCommandContext{ + Log: logger, + Workspace: "default", + RepoRelDir: ".", + User: models.User{Username: "username"}, + EscapedCommentArgs: []string{"comment", "args"}, + Pull: models.PullRequest{ + Num: 2, + }, + BaseRepo: models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + }, + } RegisterMockTestingT(t) terraform := mocks.NewMockClient() - asyncTf := &remotePlanMock{} tfVersion, _ := version.NewVersion("0.11.12") updater := mocks2.NewMockCommitStatusUpdater() + asyncTf := &remotePlanMock{} s := runtime.PlanStepRunner{ TerraformExecutor: terraform, DefaultTFVersion: tfVersion, @@ -725,7 +736,7 @@ locally at this time. // First, terraform workspace gets run. When(terraform.RunCommandWithVersion( - logger, + ctx, absProjectPath, []string{"workspace", "show"}, map[string]string(nil), @@ -736,7 +747,6 @@ locally at this time. expPlanArgs := []string{"plan", "-input=false", "-refresh", - "-no-color", "-out", fmt.Sprintf("%q", filepath.Join(absProjectPath, "default.tfplan")), "-var", @@ -758,25 +768,9 @@ locally at this time. planErr := errors.New("exit status 1: err") planOutput := "\n" + remoteOpsErr asyncTf.LinesToSend = remotePlanOutput - When(terraform.RunCommandWithVersion(logger, absProjectPath, expPlanArgs, map[string]string(nil), tfVersion, "default")). + When(terraform.RunCommandWithVersion(ctx, absProjectPath, expPlanArgs, map[string]string(nil), tfVersion, "default")). ThenReturn(planOutput, planErr) - // Now that mocking is set up, we're ready to run the plan. - ctx := models.ProjectCommandContext{ - Log: logger, - Workspace: "default", - RepoRelDir: ".", - User: models.User{Username: "username"}, - EscapedCommentArgs: []string{"comment", "args"}, - Pull: models.PullRequest{ - Num: 2, - }, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - }, - } output, err := s.Run(ctx, []string{"extra", "args"}, absProjectPath, map[string]string(nil)) Ok(t, err) Equals(t, ` @@ -791,11 +785,11 @@ Terraform will perform the following actions: Plan: 0 to add, 0 to change, 1 to destroy.`, output) - expRemotePlanArgs := []string{"plan", "-input=false", "-refresh", "-no-color", "extra", "args", "comment", "args"} + expRemotePlanArgs := []string{"plan", "-input=false", "-refresh", "extra", "args", "comment", "args"} Equals(t, expRemotePlanArgs, asyncTf.CalledArgs) // Verify that the fake plan file we write has the correct contents. 
- bytes, err := ioutil.ReadFile(filepath.Join(absProjectPath, "default.tfplan")) + bytes, err := os.ReadFile(filepath.Join(absProjectPath, "default.tfplan")) Ok(t, err) Equals(t, `Atlantis: this plan was created by remote ops @@ -890,7 +884,7 @@ type remotePlanMock struct { CalledArgs []string } -func (r *remotePlanMock) RunCommandAsync(log logging.SimpleLogging, path string, args []string, envs map[string]string, v *version.Version, workspace string) (chan<- string, <-chan terraform.Line) { +func (r *remotePlanMock) RunCommandAsync(ctx models.ProjectCommandContext, path string, args []string, envs map[string]string, v *version.Version, workspace string) (chan<- string, <-chan terraform.Line) { r.CalledArgs = args in := make(chan string) out := make(chan terraform.Line) diff --git a/server/core/runtime/plan_type_step_runner_delegate.go b/server/core/runtime/plan_type_step_runner_delegate.go index a372cd2e00..d2cef7a2d8 100644 --- a/server/core/runtime/plan_type_step_runner_delegate.go +++ b/server/core/runtime/plan_type_step_runner_delegate.go @@ -1,7 +1,7 @@ package runtime import ( - "io/ioutil" + "os" "path/filepath" "github.com/pkg/errors" @@ -40,7 +40,7 @@ type PlanTypeStepRunnerDelegate struct { } func (p *PlanTypeStepRunnerDelegate) isRemotePlan(planFile string) (bool, error) { - data, err := ioutil.ReadFile(planFile) + data, err := os.ReadFile(planFile) if err != nil { return false, errors.Wrapf(err, "unable to read %s", planFile) diff --git a/server/core/runtime/plan_type_step_runner_delegate_test.go b/server/core/runtime/plan_type_step_runner_delegate_test.go index 060e163a68..189f171de9 100644 --- a/server/core/runtime/plan_type_step_runner_delegate_test.go +++ b/server/core/runtime/plan_type_step_runner_delegate_test.go @@ -2,7 +2,7 @@ package runtime import ( "errors" - "io/ioutil" + "os" "path/filepath" "testing" @@ -44,7 +44,7 @@ func TestRunDelegate(t *testing.T) { tmpDir, cleanup := TempDir(t) defer cleanup() planPath := filepath.Join(tmpDir, "workspace.tfplan") - err := ioutil.WriteFile(planPath, []byte("Atlantis: this plan was created by remote ops\n"+planFileContents), 0600) + err := os.WriteFile(planPath, []byte("Atlantis: this plan was created by remote ops\n"+planFileContents), 0600) Ok(t, err) ctx := models.ProjectCommandContext{ @@ -73,7 +73,7 @@ func TestRunDelegate(t *testing.T) { tmpDir, cleanup := TempDir(t) defer cleanup() planPath := filepath.Join(tmpDir, "workspace.tfplan") - err := ioutil.WriteFile(planPath, []byte("Atlantis: this plan was created by remote ops\n"+planFileContents), 0600) + err := os.WriteFile(planPath, []byte("Atlantis: this plan was created by remote ops\n"+planFileContents), 0600) Ok(t, err) ctx := models.ProjectCommandContext{ @@ -102,7 +102,7 @@ func TestRunDelegate(t *testing.T) { tmpDir, cleanup := TempDir(t) defer cleanup() planPath := filepath.Join(tmpDir, "workspace.tfplan") - err := ioutil.WriteFile(planPath, []byte(planFileContents), 0600) + err := os.WriteFile(planPath, []byte(planFileContents), 0600) Ok(t, err) ctx := models.ProjectCommandContext{ @@ -131,7 +131,7 @@ func TestRunDelegate(t *testing.T) { tmpDir, cleanup := TempDir(t) defer cleanup() planPath := filepath.Join(tmpDir, "workspace.tfplan") - err := ioutil.WriteFile(planPath, []byte(planFileContents), 0600) + err := os.WriteFile(planPath, []byte(planFileContents), 0600) Ok(t, err) ctx := models.ProjectCommandContext{ diff --git a/server/core/runtime/policy/conftest_client.go b/server/core/runtime/policy/conftest_client.go index 8adfdc99c4..620361f3c7 100644 --- 
a/server/core/runtime/policy/conftest_client.go +++ b/server/core/runtime/policy/conftest_client.go @@ -94,7 +94,7 @@ func (p *SourceResolverProxy) Resolve(policySet valid.PolicySet) (string, error) case valid.LocalPolicySet: return p.localSourceResolver.Resolve(policySet) default: - return "", errors.New(fmt.Sprintf("unable to resolve policy set source %s", source)) + return "", fmt.Errorf("unable to resolve policy set source %s", source) } } @@ -237,7 +237,7 @@ func getDefaultVersion() (*version.Version, error) { defaultVersion, exists := os.LookupEnv(DefaultConftestVersionEnvKey) if !exists { - return nil, errors.New(fmt.Sprintf("%s not set.", DefaultConftestVersionEnvKey)) + return nil, fmt.Errorf("%s not set", DefaultConftestVersionEnvKey) } wrappedVersion, err := version.NewVersion(defaultVersion) diff --git a/server/core/runtime/pull_approved_checker.go b/server/core/runtime/pull_approved_checker.go index e77aa2acb1..ade67c9923 100644 --- a/server/core/runtime/pull_approved_checker.go +++ b/server/core/runtime/pull_approved_checker.go @@ -7,5 +7,5 @@ import ( //go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_pull_approved_checker.go PullApprovedChecker type PullApprovedChecker interface { - PullIsApproved(baseRepo models.Repo, pull models.PullRequest) (bool, error) + PullIsApproved(baseRepo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) } diff --git a/server/core/runtime/runtime.go b/server/core/runtime/runtime.go index f3b130c689..7e35e07623 100644 --- a/server/core/runtime/runtime.go +++ b/server/core/runtime/runtime.go @@ -25,7 +25,7 @@ const ( // TerraformExec brings the interface from TerraformClient into this package // without causing circular imports. type TerraformExec interface { - RunCommandWithVersion(log logging.SimpleLogging, path string, args []string, envs map[string]string, v *version.Version, workspace string) (string, error) + RunCommandWithVersion(ctx models.ProjectCommandContext, path string, args []string, envs map[string]string, v *version.Version, workspace string) (string, error) EnsureVersion(log logging.SimpleLogging, v *version.Version) error } @@ -40,7 +40,7 @@ type AsyncTFExec interface { // Callers can use the input channel to pass stdin input to the command. // If any error is passed on the out channel, there will be no // further output (so callers are free to exit). 
- RunCommandAsync(log logging.SimpleLogging, path string, args []string, envs map[string]string, v *version.Version, workspace string) (chan<- string, <-chan terraform.Line) + RunCommandAsync(ctx models.ProjectCommandContext, path string, args []string, envs map[string]string, v *version.Version, workspace string) (chan<- string, <-chan terraform.Line) } // StatusUpdater brings the interface from CommitStatusUpdater into this package diff --git a/server/core/runtime/show_step_runner.go b/server/core/runtime/show_step_runner.go index 7464636ecd..46b9b3aae4 100644 --- a/server/core/runtime/show_step_runner.go +++ b/server/core/runtime/show_step_runner.go @@ -1,7 +1,6 @@ package runtime import ( - "io/ioutil" "os" "path/filepath" @@ -40,9 +39,9 @@ func (p *ShowStepRunner) Run(ctx models.ProjectCommandContext, extraArgs []strin showResultFile := filepath.Join(path, ctx.GetShowResultFileName()) output, err := p.TerraformExecutor.RunCommandWithVersion( - ctx.Log, + ctx, path, - []string{"show", "-no-color", "-json", filepath.Clean(planFile)}, + []string{"show", "-json", filepath.Clean(planFile)}, envs, tfVersion, ctx.Workspace, @@ -52,7 +51,7 @@ func (p *ShowStepRunner) Run(ctx models.ProjectCommandContext, extraArgs []strin return "", errors.Wrap(err, "running terraform show") } - if err := ioutil.WriteFile(showResultFile, []byte(output), os.ModePerm); err != nil { + if err := os.WriteFile(showResultFile, []byte(output), os.ModePerm); err != nil { return "", errors.Wrap(err, "writing terraform show result") } diff --git a/server/core/runtime/show_step_runner_test.go b/server/core/runtime/show_step_runner_test.go index a01d8fb503..3ff1a5de06 100644 --- a/server/core/runtime/show_step_runner_test.go +++ b/server/core/runtime/show_step_runner_test.go @@ -2,7 +2,8 @@ package runtime import ( "errors" - "io/ioutil" + "fmt" + "os" "path/filepath" "testing" @@ -16,7 +17,7 @@ import ( func TestShowStepRunnner(t *testing.T) { logger := logging.NewNoopLogger(t) - path, _ := ioutil.TempDir("", "") + path, _ := os.MkdirTemp("", "") resultPath := filepath.Join(path, "test-default.json") envs := map[string]string{"key": "val"} tfVersion, _ := version.NewVersion("0.12") @@ -38,18 +39,18 @@ func TestShowStepRunnner(t *testing.T) { t.Run("success", func(t *testing.T) { When(mockExecutor.RunCommandWithVersion( - logger, path, []string{"show", "-no-color", "-json", filepath.Join(path, "test-default.tfplan")}, envs, tfVersion, context.Workspace, + context, path, []string{"show", "-json", filepath.Join(path, "test-default.tfplan")}, envs, tfVersion, context.Workspace, )).ThenReturn("success", nil) r, err := subject.Run(context, []string{}, path, envs) Ok(t, err) - actual, _ := ioutil.ReadFile(resultPath) + actual, _ := os.ReadFile(resultPath) actualStr := string(actual) - Assert(t, actualStr == "success", "got expected result") - Assert(t, r == "success", "returned expected result") + Assert(t, actualStr == "success", fmt.Sprintf("expected '%s' to be success", actualStr)) + Assert(t, r == "success", fmt.Sprintf("expected '%s' to be success", r)) }) @@ -65,14 +66,14 @@ func TestShowStepRunnner(t *testing.T) { } When(mockExecutor.RunCommandWithVersion( - logger, path, []string{"show", "-no-color", "-json", filepath.Join(path, "test-default.tfplan")}, envs, v, context.Workspace, + contextWithVersionOverride, path, []string{"show", "-json", filepath.Join(path, "test-default.tfplan")}, envs, v, context.Workspace, )).ThenReturn("success", nil) r, err := subject.Run(contextWithVersionOverride, []string{}, path, envs) 
Ok(t, err) - actual, _ := ioutil.ReadFile(resultPath) + actual, _ := os.ReadFile(resultPath) actualStr := string(actual) Assert(t, actualStr == "success", "got expected result") @@ -82,7 +83,7 @@ func TestShowStepRunnner(t *testing.T) { t.Run("failure running command", func(t *testing.T) { When(mockExecutor.RunCommandWithVersion( - logger, path, []string{"show", "-no-color", "-json", filepath.Join(path, "test-default.tfplan")}, envs, tfVersion, context.Workspace, + context, path, []string{"show", "-json", filepath.Join(path, "test-default.tfplan")}, envs, tfVersion, context.Workspace, )).ThenReturn("success", errors.New("error")) _, err := subject.Run(context, []string{}, path, envs) diff --git a/server/core/runtime/version_step_runner.go b/server/core/runtime/version_step_runner.go index d20ea919db..a7369af7c2 100644 --- a/server/core/runtime/version_step_runner.go +++ b/server/core/runtime/version_step_runner.go @@ -21,5 +21,5 @@ func (v *VersionStepRunner) Run(ctx models.ProjectCommandContext, extraArgs []st } versionCmd := []string{"version"} - return v.TerraformExecutor.RunCommandWithVersion(ctx.Log, filepath.Clean(path), versionCmd, envs, tfVersion, ctx.Workspace) + return v.TerraformExecutor.RunCommandWithVersion(ctx, filepath.Clean(path), versionCmd, envs, tfVersion, ctx.Workspace) } diff --git a/server/core/runtime/version_step_runner_test.go b/server/core/runtime/version_step_runner_test.go index 6279b4d037..797e4ba6c0 100644 --- a/server/core/runtime/version_step_runner_test.go +++ b/server/core/runtime/version_step_runner_test.go @@ -44,7 +44,7 @@ func TestRunVersionStep(t *testing.T) { t.Run("ensure runs", func(t *testing.T) { _, err := s.Run(context, []string{}, tmpDir, map[string]string(nil)) - terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, tmpDir, []string{"version"}, map[string]string(nil), tfVersion, "default") + terraform.VerifyWasCalledOnce().RunCommandWithVersion(context, tmpDir, []string{"version"}, map[string]string(nil), tfVersion, "default") Ok(t, err) }) } diff --git a/server/core/terraform/mocks/mock_terraform_client.go b/server/core/terraform/mocks/mock_terraform_client.go index 05565e2080..95157c3685 100644 --- a/server/core/terraform/mocks/mock_terraform_client.go +++ b/server/core/terraform/mocks/mock_terraform_client.go @@ -4,11 +4,14 @@ package mocks import ( + "reflect" + "time" + go_version "github.com/hashicorp/go-version" pegomock "github.com/petergtz/pegomock" + "github.com/runatlantis/atlantis/server/core/terraform" + "github.com/runatlantis/atlantis/server/events/models" logging "github.com/runatlantis/atlantis/server/logging" - "reflect" - "time" ) type MockClient struct { @@ -26,11 +29,11 @@ func NewMockClient(options ...pegomock.Option) *MockClient { func (mock *MockClient) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockClient) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockClient) RunCommandWithVersion(log logging.SimpleLogging, path string, args []string, envs map[string]string, v *go_version.Version, workspace string) (string, error) { +func (mock *MockClient) RunCommandWithVersion(ctx models.ProjectCommandContext, path string, args []string, envs map[string]string, v *go_version.Version, workspace string) (string, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockClient().") } - params := []pegomock.Param{log, path, args, envs, v, workspace} + params := []pegomock.Param{ctx, path, args, envs, v, workspace} result := pegomock.GetGenericMockFrom(mock).Invoke("RunCommandWithVersion", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string var ret1 error @@ -45,6 +48,16 @@ func (mock *MockClient) RunCommandWithVersion(log logging.SimpleLogging, path st return ret0, ret1 } +func (mock *MockClient) RunCommandAsync(ctx models.ProjectCommandContext, path string, args []string, envs map[string]string, v *go_version.Version, workspace string) (chan<- string, <-chan terraform.Line) { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockClient().") + } + outCh := make(chan terraform.Line) + inCh := make(chan string) + + return inCh, outCh +} + func (mock *MockClient) EnsureVersion(log logging.SimpleLogging, v *go_version.Version) error { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") @@ -97,8 +110,8 @@ type VerifierMockClient struct { timeout time.Duration } -func (verifier *VerifierMockClient) RunCommandWithVersion(log logging.SimpleLogging, path string, args []string, envs map[string]string, v *go_version.Version, workspace string) *MockClient_RunCommandWithVersion_OngoingVerification { - params := []pegomock.Param{log, path, args, envs, v, workspace} +func (verifier *VerifierMockClient) RunCommandWithVersion(ctx models.ProjectCommandContext, path string, args []string, envs map[string]string, v *go_version.Version, workspace string) *MockClient_RunCommandWithVersion_OngoingVerification { + params := []pegomock.Param{ctx, path, args, envs, v, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "RunCommandWithVersion", params, verifier.timeout) return &MockClient_RunCommandWithVersion_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } diff --git a/server/core/terraform/terraform_client.go b/server/core/terraform/terraform_client.go index d6dd77c2e8..c40b245166 100644 --- a/server/core/terraform/terraform_client.go +++ b/server/core/terraform/terraform_client.go @@ -18,7 +18,6 @@ import ( "bufio" "fmt" "io" - "io/ioutil" "os" "os/exec" "path/filepath" @@ -31,9 +30,18 @@ import ( "github.com/hashicorp/go-version" "github.com/mitchellh/go-homedir" "github.com/pkg/errors" + + "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/events/terraform/ansi" + "github.com/runatlantis/atlantis/server/handlers" "github.com/runatlantis/atlantis/server/logging" ) +var LogStreamingValidCmds = [...]string{"init", "plan", "apply"} + +// Setting the buffer size to 10mb +const BufioScannerBufferSize = 10 * 1024 * 1024 + //go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_terraform_client.go Client type Client interface { @@ -70,6 +78,8 @@ type DefaultClient struct { // usePluginCache determines whether or not to set the TF_PLUGIN_CACHE_DIR env var usePluginCache bool + + projectCmdOutputHandler handlers.ProjectCommandOutputHandler } //go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_downloader.go Downloader @@ -101,6 +111,7 @@ func NewClientWithDefaultVersion( tfDownloader Downloader, usePluginCache bool, fetchAsync bool, + projectCmdOutputHandler handlers.ProjectCommandOutputHandler, ) (*DefaultClient, 
error) { var finalDefaultVersion *version.Version var localVersion *version.Version @@ -158,7 +169,6 @@ func NewClientWithDefaultVersion( return nil, err } } - return &DefaultClient{ defaultVersion: finalDefaultVersion, terraformPluginCacheDir: cacheDir, @@ -168,6 +178,7 @@ func NewClientWithDefaultVersion( versionsLock: &versionsLock, versions: versions, usePluginCache: usePluginCache, + projectCmdOutputHandler: projectCmdOutputHandler, }, nil } @@ -182,7 +193,9 @@ func NewTestClient( defaultVersionFlagName string, tfDownloadURL string, tfDownloader Downloader, - usePluginCache bool) (*DefaultClient, error) { + usePluginCache bool, + projectCmdOutputHandler handlers.ProjectCommandOutputHandler, +) (*DefaultClient, error) { return NewClientWithDefaultVersion( log, binDir, @@ -195,6 +208,7 @@ func NewTestClient( tfDownloader, usePluginCache, false, + projectCmdOutputHandler, ) } @@ -216,7 +230,9 @@ func NewClient( defaultVersionFlagName string, tfDownloadURL string, tfDownloader Downloader, - usePluginCache bool) (*DefaultClient, error) { + usePluginCache bool, + projectCmdOutputHandler handlers.ProjectCommandOutputHandler, +) (*DefaultClient, error) { return NewClientWithDefaultVersion( log, binDir, @@ -229,6 +245,7 @@ func NewClient( tfDownloader, usePluginCache, true, + projectCmdOutputHandler, ) } @@ -261,8 +278,25 @@ func (c *DefaultClient) EnsureVersion(log logging.SimpleLogging, v *version.Vers } // See Client.RunCommandWithVersion. -func (c *DefaultClient) RunCommandWithVersion(log logging.SimpleLogging, path string, args []string, customEnvVars map[string]string, v *version.Version, workspace string) (string, error) { - tfCmd, cmd, err := c.prepCmd(log, v, workspace, path, args) +func (c *DefaultClient) RunCommandWithVersion(ctx models.ProjectCommandContext, path string, args []string, customEnvVars map[string]string, v *version.Version, workspace string) (string, error) { + if isAsyncEligibleCommand(args[0]) { + _, outCh := c.RunCommandAsync(ctx, path, args, customEnvVars, v, workspace) + var lines []string + var err error + for line := range outCh { + if line.Err != nil { + err = line.Err + break + } + lines = append(lines, line.Line) + } + output := strings.Join(lines, "\n") + + // sanitize output by stripping out any ansi characters. + output = ansi.Strip(output) + return fmt.Sprintf("%s\n", output), err + } + tfCmd, cmd, err := c.prepCmd(ctx.Log, v, workspace, path, args) if err != nil { return "", err } @@ -274,14 +308,12 @@ func (c *DefaultClient) RunCommandWithVersion(log logging.SimpleLogging, path st out, err := cmd.CombinedOutput() if err != nil { err = errors.Wrapf(err, "running %q in %q", tfCmd, path) - log.Err(err.Error()) - - outputStr := string(out) - log.Debug(outputStr) - return outputStr, err + ctx.Log.Err(err.Error()) + return ansi.Strip(string(out)), err } - log.Info("successfully ran %q in %q", tfCmd, path) - return string(out), nil + ctx.Log.Info("successfully ran %q in %q", tfCmd, path) + + return ansi.Strip(string(out)), nil } // prepCmd builds a ready to execute command based on the version of terraform @@ -344,7 +376,7 @@ type Line struct { // Callers can use the input channel to pass stdin input to the command. // If any error is passed on the out channel, there will be no // further output (so callers are free to exit). 
-func (c *DefaultClient) RunCommandAsync(log logging.SimpleLogging, path string, args []string, customEnvVars map[string]string, v *version.Version, workspace string) (chan<- string, <-chan Line) { +func (c *DefaultClient) RunCommandAsync(ctx models.ProjectCommandContext, path string, args []string, customEnvVars map[string]string, v *version.Version, workspace string) (chan<- string, <-chan Line) { outCh := make(chan Line) inCh := make(chan string) @@ -358,9 +390,9 @@ func (c *DefaultClient) RunCommandAsync(log logging.SimpleLogging, path string, close(inCh) }() - tfCmd, cmd, err := c.prepCmd(log, v, workspace, path, args) + tfCmd, cmd, err := c.prepCmd(ctx.Log, v, workspace, path, args) if err != nil { - log.Err(err.Error()) + ctx.Log.Err(err.Error()) outCh <- Line{Err: err} return } @@ -373,11 +405,11 @@ func (c *DefaultClient) RunCommandAsync(log logging.SimpleLogging, path string, } cmd.Env = envVars - log.Debug("starting %q in %q", tfCmd, path) + ctx.Log.Debug("starting %q in %q", tfCmd, path) err = cmd.Start() if err != nil { err = errors.Wrapf(err, "running %q in %q", tfCmd, path) - log.Err(err.Error()) + ctx.Log.Err(err.Error()) outCh <- Line{Err: err} return } @@ -386,10 +418,10 @@ func (c *DefaultClient) RunCommandAsync(log logging.SimpleLogging, path string, // This function will exit when inCh is closed which we do in our defer. go func() { for line := range inCh { - log.Debug("writing %q to remote command's stdin", line) + ctx.Log.Debug("writing %q to remote command's stdin", line) _, err := io.WriteString(stdin, line) if err != nil { - log.Err(errors.Wrapf(err, "writing %q to process", line).Error()) + ctx.Log.Err(errors.Wrapf(err, "writing %q to process", line).Error()) } } }() @@ -397,19 +429,25 @@ func (c *DefaultClient) RunCommandAsync(log logging.SimpleLogging, path string, // Use a waitgroup to block until our stdout/err copying is complete. wg := new(sync.WaitGroup) wg.Add(2) - // Asynchronously copy from stdout/err to outCh. go func() { s := bufio.NewScanner(stdout) + buf := []byte{} + s.Buffer(buf, BufioScannerBufferSize) + for s.Scan() { - outCh <- Line{Line: s.Text()} + message := s.Text() + outCh <- Line{Line: message} + c.projectCmdOutputHandler.Send(ctx, message) } wg.Done() }() go func() { s := bufio.NewScanner(stderr) for s.Scan() { - outCh <- Line{Line: s.Text()} + message := s.Text() + outCh <- Line{Line: message} + c.projectCmdOutputHandler.Send(ctx, message) } wg.Done() }() @@ -424,10 +462,10 @@ func (c *DefaultClient) RunCommandAsync(log logging.SimpleLogging, path string, // We're done now. Send an error if there was one. if err != nil { err = errors.Wrapf(err, "running %q in %q", tfCmd, path) - log.Err(err.Error()) + ctx.Log.Err(err.Error()) outCh <- Line{Err: err} } else { - log.Info("successfully ran %q in %q", tfCmd, path) + ctx.Log.Info("successfully ran %q in %q", tfCmd, path) } }() @@ -494,7 +532,7 @@ func generateRCFile(tfeToken string, tfeHostname string, home string) error { // what we would have written to it, then we error out because we don't // want to overwrite anything. 
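Two details in the RunCommandAsync hunk above are easy to miss: bufio.Scanner aborts with ErrTooLong once a single line exceeds its default 64 KiB token limit, which is why the stdout/stderr goroutines now call s.Buffer with the 10 MB BufioScannerBufferSize, and only init/plan/apply (LogStreamingValidCmds) are routed through the async path. A self-contained sketch of both ideas, using local names rather than the Atlantis identifiers:

```go
package main

import (
	"bufio"
	"fmt"
	"strings"
)

// Local equivalents of LogStreamingValidCmds and BufioScannerBufferSize.
var streamableCmds = [...]string{"init", "plan", "apply"}

const scannerBufferSize = 10 * 1024 * 1024 // 10 MB

// isStreamable mirrors isAsyncEligibleCommand: a simple membership check.
func isStreamable(cmd string) bool {
	for _, valid := range streamableCmds {
		if valid == cmd {
			return true
		}
	}
	return false
}

func main() {
	// A single 100 KiB line would exceed bufio.MaxScanTokenSize (64 KiB) and
	// abort the scan; the enlarged buffer keeps the stream alive.
	longLine := strings.Repeat("x", 100*1024)
	s := bufio.NewScanner(strings.NewReader(longLine + "\n"))
	s.Buffer(make([]byte, 0, 64*1024), scannerBufferSize)
	for s.Scan() {
		fmt.Println("scanned a line of", len(s.Text()), "bytes")
	}
	fmt.Println("scan error:", s.Err()) // <nil> thanks to the larger buffer

	fmt.Println(isStreamable("plan"), isStreamable("version")) // true false
}
```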
if _, err := os.Stat(rcFile); err == nil { - currContents, err := ioutil.ReadFile(rcFile) // nolint: gosec + currContents, err := os.ReadFile(rcFile) // nolint: gosec if err != nil { return errors.Wrapf(err, "trying to read %s to ensure we're not overwriting it", rcFile) } @@ -506,12 +544,21 @@ func generateRCFile(tfeToken string, tfeHostname string, home string) error { return nil } - if err := ioutil.WriteFile(rcFile, []byte(config), 0600); err != nil { + if err := os.WriteFile(rcFile, []byte(config), 0600); err != nil { return errors.Wrapf(err, "writing generated %s file with TFE token to %s", rcFilename, rcFile) } return nil } +func isAsyncEligibleCommand(cmd string) bool { + for _, validCmd := range LogStreamingValidCmds { + if validCmd == cmd { + return true + } + } + return false +} + func getVersion(tfBinary string) (*version.Version, error) { versionOutBytes, err := exec.Command(tfBinary, "version").Output() // #nosec versionOutput := string(versionOutBytes) diff --git a/server/core/terraform/terraform_client_internal_test.go b/server/core/terraform/terraform_client_internal_test.go index 8a447fb828..903724d1f7 100644 --- a/server/core/terraform/terraform_client_internal_test.go +++ b/server/core/terraform/terraform_client_internal_test.go @@ -2,13 +2,14 @@ package terraform import ( "fmt" - "io/ioutil" "os" "path/filepath" "strings" "testing" version "github.com/hashicorp/go-version" + "github.com/runatlantis/atlantis/server/events/models" + handlermocks "github.com/runatlantis/atlantis/server/handlers/mocks" "github.com/runatlantis/atlantis/server/logging" . "github.com/runatlantis/atlantis/testing" ) @@ -24,7 +25,7 @@ func TestGenerateRCFile_WritesFile(t *testing.T) { expContents := `credentials "hostname" { token = "token" }` - actContents, err := ioutil.ReadFile(filepath.Join(tmp, ".terraformrc")) + actContents, err := os.ReadFile(filepath.Join(tmp, ".terraformrc")) Ok(t, err) Equals(t, expContents, string(actContents)) } @@ -36,7 +37,7 @@ func TestGenerateRCFile_WillNotOverwrite(t *testing.T) { defer cleanup() rcFile := filepath.Join(tmp, ".terraformrc") - err := ioutil.WriteFile(rcFile, []byte("contents"), 0600) + err := os.WriteFile(rcFile, []byte("contents"), 0600) Ok(t, err) actErr := generateRCFile("token", "hostname", tmp) @@ -54,7 +55,7 @@ func TestGenerateRCFile_NoErrIfContentsSame(t *testing.T) { contents := `credentials "app.terraform.io" { token = "token" }` - err := ioutil.WriteFile(rcFile, []byte(contents), 0600) + err := os.WriteFile(rcFile, []byte(contents), 0600) Ok(t, err) err = generateRCFile("token", "app.terraform.io", tmp) @@ -68,7 +69,7 @@ func TestGenerateRCFile_ErrIfCannotRead(t *testing.T) { defer cleanup() rcFile := filepath.Join(tmp, ".terraformrc") - err := ioutil.WriteFile(rcFile, []byte("can't see me!"), 0000) + err := os.WriteFile(rcFile, []byte("can't see me!"), 0000) Ok(t, err) expErr := fmt.Sprintf("trying to read %s to ensure we're not overwriting it: open %s: permission denied", rcFile, rcFile) @@ -89,12 +90,32 @@ func TestDefaultClient_RunCommandWithVersion_EnvVars(t *testing.T) { v, err := version.NewVersion("0.11.11") Ok(t, err) tmp, cleanup := TempDir(t) + logger := logging.NewNoopLogger(t) + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() + + ctx := models.ProjectCommandContext{ + Log: logger, + Workspace: "default", + RepoRelDir: ".", + User: models.User{Username: "username"}, + EscapedCommentArgs: []string{"comment", "args"}, + ProjectName: "projectname", + Pull: models.PullRequest{ + Num: 2, + }, + 
BaseRepo: models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + }, + } defer cleanup() client := &DefaultClient{ defaultVersion: v, terraformPluginCacheDir: tmp, overrideTF: "echo", usePluginCache: true, + projectCmdOutputHandler: projectCmdOutputHandler, } args := []string{ @@ -104,8 +125,8 @@ func TestDefaultClient_RunCommandWithVersion_EnvVars(t *testing.T) { "ATLANTIS_TERRAFORM_VERSION=$ATLANTIS_TERRAFORM_VERSION", "DIR=$DIR", } - log := logging.NewNoopLogger(t) - out, err := client.RunCommandWithVersion(log, tmp, args, map[string]string{}, nil, "workspace") + customEnvVars := map[string]string{} + out, err := client.RunCommandWithVersion(ctx, tmp, args, customEnvVars, nil, "workspace") Ok(t, err) exp := fmt.Sprintf("TF_IN_AUTOMATION=true TF_PLUGIN_CACHE_DIR=%s WORKSPACE=workspace ATLANTIS_TERRAFORM_VERSION=0.11.11 DIR=%s\n", tmp, tmp) Equals(t, exp, out) @@ -116,11 +137,31 @@ func TestDefaultClient_RunCommandWithVersion_Error(t *testing.T) { v, err := version.NewVersion("0.11.11") Ok(t, err) tmp, cleanup := TempDir(t) + logger := logging.NewNoopLogger(t) + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() + + ctx := models.ProjectCommandContext{ + Log: logger, + Workspace: "default", + RepoRelDir: ".", + User: models.User{Username: "username"}, + EscapedCommentArgs: []string{"comment", "args"}, + ProjectName: "projectname", + Pull: models.PullRequest{ + Num: 2, + }, + BaseRepo: models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + }, + } defer cleanup() client := &DefaultClient{ defaultVersion: v, terraformPluginCacheDir: tmp, overrideTF: "echo", + projectCmdOutputHandler: projectCmdOutputHandler, } args := []string{ @@ -129,8 +170,7 @@ func TestDefaultClient_RunCommandWithVersion_Error(t *testing.T) { "exit", "1", } - log := logging.NewNoopLogger(t) - out, err := client.RunCommandWithVersion(log, tmp, args, map[string]string{}, nil, "workspace") + out, err := client.RunCommandWithVersion(ctx, tmp, args, map[string]string{}, nil, "workspace") ErrEquals(t, fmt.Sprintf(`running "echo dying && exit 1" in %q: exit status 1`, tmp), err) // Test that we still get our output. 
Equals(t, "dying\n", out) @@ -140,12 +180,32 @@ func TestDefaultClient_RunCommandAsync_Success(t *testing.T) { v, err := version.NewVersion("0.11.11") Ok(t, err) tmp, cleanup := TempDir(t) + logger := logging.NewNoopLogger(t) + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() + + ctx := models.ProjectCommandContext{ + Log: logger, + Workspace: "default", + RepoRelDir: ".", + User: models.User{Username: "username"}, + EscapedCommentArgs: []string{"comment", "args"}, + ProjectName: "projectname", + Pull: models.PullRequest{ + Num: 2, + }, + BaseRepo: models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + }, + } defer cleanup() client := &DefaultClient{ defaultVersion: v, terraformPluginCacheDir: tmp, overrideTF: "echo", usePluginCache: true, + projectCmdOutputHandler: projectCmdOutputHandler, } args := []string{ @@ -155,8 +215,7 @@ func TestDefaultClient_RunCommandAsync_Success(t *testing.T) { "ATLANTIS_TERRAFORM_VERSION=$ATLANTIS_TERRAFORM_VERSION", "DIR=$DIR", } - log := logging.NewNoopLogger(t) - _, outCh := client.RunCommandAsync(log, tmp, args, map[string]string{}, nil, "workspace") + _, outCh := client.RunCommandAsync(ctx, tmp, args, map[string]string{}, nil, "workspace") out, err := waitCh(outCh) Ok(t, err) @@ -168,11 +227,31 @@ func TestDefaultClient_RunCommandAsync_BigOutput(t *testing.T) { v, err := version.NewVersion("0.11.11") Ok(t, err) tmp, cleanup := TempDir(t) + logger := logging.NewNoopLogger(t) + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() + + ctx := models.ProjectCommandContext{ + Log: logger, + Workspace: "default", + RepoRelDir: ".", + User: models.User{Username: "username"}, + EscapedCommentArgs: []string{"comment", "args"}, + ProjectName: "projectname", + Pull: models.PullRequest{ + Num: 2, + }, + BaseRepo: models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + }, + } defer cleanup() client := &DefaultClient{ defaultVersion: v, terraformPluginCacheDir: tmp, overrideTF: "cat", + projectCmdOutputHandler: projectCmdOutputHandler, } filename := filepath.Join(tmp, "data") f, err := os.OpenFile(filename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) @@ -185,8 +264,7 @@ func TestDefaultClient_RunCommandAsync_BigOutput(t *testing.T) { _, err = f.WriteString(s) Ok(t, err) } - log := logging.NewNoopLogger(t) - _, outCh := client.RunCommandAsync(log, tmp, []string{filename}, map[string]string{}, nil, "workspace") + _, outCh := client.RunCommandAsync(ctx, tmp, []string{filename}, map[string]string{}, nil, "workspace") out, err := waitCh(outCh) Ok(t, err) @@ -197,14 +275,33 @@ func TestDefaultClient_RunCommandAsync_StderrOutput(t *testing.T) { v, err := version.NewVersion("0.11.11") Ok(t, err) tmp, cleanup := TempDir(t) + logger := logging.NewNoopLogger(t) + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() + + ctx := models.ProjectCommandContext{ + Log: logger, + Workspace: "default", + RepoRelDir: ".", + User: models.User{Username: "username"}, + EscapedCommentArgs: []string{"comment", "args"}, + ProjectName: "projectname", + Pull: models.PullRequest{ + Num: 2, + }, + BaseRepo: models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + }, + } defer cleanup() client := &DefaultClient{ defaultVersion: v, terraformPluginCacheDir: tmp, overrideTF: "echo", + projectCmdOutputHandler: projectCmdOutputHandler, } - log := logging.NewNoopLogger(t) - _, outCh := client.RunCommandAsync(log, tmp, []string{"stderr", ">&2"}, map[string]string{}, nil, 
"workspace") + _, outCh := client.RunCommandAsync(ctx, tmp, []string{"stderr", ">&2"}, map[string]string{}, nil, "workspace") out, err := waitCh(outCh) Ok(t, err) @@ -215,14 +312,33 @@ func TestDefaultClient_RunCommandAsync_ExitOne(t *testing.T) { v, err := version.NewVersion("0.11.11") Ok(t, err) tmp, cleanup := TempDir(t) + logger := logging.NewNoopLogger(t) + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() + + ctx := models.ProjectCommandContext{ + Log: logger, + Workspace: "default", + RepoRelDir: ".", + User: models.User{Username: "username"}, + EscapedCommentArgs: []string{"comment", "args"}, + ProjectName: "projectname", + Pull: models.PullRequest{ + Num: 2, + }, + BaseRepo: models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + }, + } defer cleanup() client := &DefaultClient{ defaultVersion: v, terraformPluginCacheDir: tmp, overrideTF: "echo", + projectCmdOutputHandler: projectCmdOutputHandler, } - log := logging.NewNoopLogger(t) - _, outCh := client.RunCommandAsync(log, tmp, []string{"dying", "&&", "exit", "1"}, map[string]string{}, nil, "workspace") + _, outCh := client.RunCommandAsync(ctx, tmp, []string{"dying", "&&", "exit", "1"}, map[string]string{}, nil, "workspace") out, err := waitCh(outCh) ErrEquals(t, fmt.Sprintf(`running "echo dying && exit 1" in %q: exit status 1`, tmp), err) @@ -234,14 +350,34 @@ func TestDefaultClient_RunCommandAsync_Input(t *testing.T) { v, err := version.NewVersion("0.11.11") Ok(t, err) tmp, cleanup := TempDir(t) + logger := logging.NewNoopLogger(t) + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() + + ctx := models.ProjectCommandContext{ + Log: logger, + Workspace: "default", + RepoRelDir: ".", + User: models.User{Username: "username"}, + EscapedCommentArgs: []string{"comment", "args"}, + ProjectName: "projectname", + Pull: models.PullRequest{ + Num: 2, + }, + BaseRepo: models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + }, + } defer cleanup() client := &DefaultClient{ defaultVersion: v, terraformPluginCacheDir: tmp, overrideTF: "read", + projectCmdOutputHandler: projectCmdOutputHandler, } - log := logging.NewNoopLogger(t) - inCh, outCh := client.RunCommandAsync(log, tmp, []string{"a", "&&", "echo", "$a"}, map[string]string{}, nil, "workspace") + + inCh, outCh := client.RunCommandAsync(ctx, tmp, []string{"a", "&&", "echo", "$a"}, map[string]string{}, nil, "workspace") inCh <- "echo me\n" out, err := waitCh(outCh) diff --git a/server/core/terraform/terraform_client_test.go b/server/core/terraform/terraform_client_test.go index 5ccb2e5734..420e55eb7a 100644 --- a/server/core/terraform/terraform_client_test.go +++ b/server/core/terraform/terraform_client_test.go @@ -15,7 +15,6 @@ package terraform_test import ( "fmt" - "io/ioutil" "os" "path/filepath" "runtime" @@ -28,6 +27,8 @@ import ( "github.com/runatlantis/atlantis/cmd" "github.com/runatlantis/atlantis/server/core/terraform" "github.com/runatlantis/atlantis/server/core/terraform/mocks" + "github.com/runatlantis/atlantis/server/events/models" + handlermocks "github.com/runatlantis/atlantis/server/handlers/mocks" "github.com/runatlantis/atlantis/server/logging" . "github.com/runatlantis/atlantis/testing" ) @@ -60,23 +61,29 @@ Your version of Terraform is out of date! The latest version is 0.11.13. 
You can update by downloading from www.terraform.io/downloads.html ` tmp, binDir, cacheDir, cleanup := mkSubDirs(t) + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() + ctx := models.ProjectCommandContext{ + Log: logging.NewNoopLogger(t), + Workspace: "default", + RepoRelDir: ".", + } defer cleanup() logger := logging.NewNoopLogger(t) // We're testing this by adding our own "fake" terraform binary to path that // outputs what would normally come from terraform version. - err := ioutil.WriteFile(filepath.Join(tmp, "terraform"), []byte(fmt.Sprintf("#!/bin/sh\necho '%s'", fakeBinOut)), 0700) // #nosec G306 + err := os.WriteFile(filepath.Join(tmp, "terraform"), []byte(fmt.Sprintf("#!/bin/sh\necho '%s'", fakeBinOut)), 0700) // #nosec G306 Ok(t, err) defer tempSetEnv(t, "PATH", fmt.Sprintf("%s:%s", tmp, os.Getenv("PATH")))() - c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true) + c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true, projectCmdOutputHandler) Ok(t, err) Ok(t, err) Equals(t, "0.11.10", c.DefaultVersion().String()) - output, err := c.RunCommandWithVersion(logger, tmp, nil, map[string]string{"test": "123"}, nil, "") + output, err := c.RunCommandWithVersion(ctx, tmp, []string{"terraform", "init"}, map[string]string{"test": "123"}, nil, "") Ok(t, err) Equals(t, fakeBinOut+"\n", output) } @@ -91,21 +98,27 @@ is 0.11.13. You can update by downloading from www.terraform.io/downloads.html ` logger := logging.NewNoopLogger(t) tmp, binDir, cacheDir, cleanup := mkSubDirs(t) + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() + ctx := models.ProjectCommandContext{ + Log: logging.NewNoopLogger(t), + Workspace: "default", + RepoRelDir: ".", + } defer cleanup() // We're testing this by adding our own "fake" terraform binary to path that // outputs what would normally come from terraform version. - err := ioutil.WriteFile(filepath.Join(tmp, "terraform"), []byte(fmt.Sprintf("#!/bin/sh\necho '%s'", fakeBinOut)), 0700) // #nosec G306 + err := os.WriteFile(filepath.Join(tmp, "terraform"), []byte(fmt.Sprintf("#!/bin/sh\necho '%s'", fakeBinOut)), 0700) // #nosec G306 Ok(t, err) defer tempSetEnv(t, "PATH", fmt.Sprintf("%s:%s", tmp, os.Getenv("PATH")))() - c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true) + c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true, projectCmdOutputHandler) Ok(t, err) Ok(t, err) Equals(t, "0.11.10", c.DefaultVersion().String()) - output, err := c.RunCommandWithVersion(logger, tmp, nil, map[string]string{}, nil, "") + output, err := c.RunCommandWithVersion(ctx, tmp, []string{"terraform", "init"}, map[string]string{}, nil, "") Ok(t, err) Equals(t, fakeBinOut+"\n", output) } @@ -115,12 +128,13 @@ is 0.11.13. You can update by downloading from www.terraform.io/downloads.html func TestNewClient_NoTF(t *testing.T) { logger := logging.NewNoopLogger(t) tmp, binDir, cacheDir, cleanup := mkSubDirs(t) + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() defer cleanup() // Set PATH to only include our empty directory. 
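The tests above and below stand in for a real terraform binary by writing a tiny shell script that echoes a version banner and prepending its directory to PATH. A self-contained sketch of that technique outside the test suite (Unix-only, and the paths are illustrative):

```go
package main

import (
	"fmt"
	"os"
	"os/exec"
	"path/filepath"
)

func main() {
	tmp, err := os.MkdirTemp("", "fake-tf")
	if err != nil {
		panic(err)
	}
	defer os.RemoveAll(tmp)

	// A fake "terraform" that only prints a version banner, like the fake
	// binaries written in the tests above.
	script := "#!/bin/sh\necho 'Terraform v0.11.10'"
	if err := os.WriteFile(filepath.Join(tmp, "terraform"), []byte(script), 0700); err != nil {
		panic(err)
	}

	// Prepend the temp dir so the fake binary wins PATH lookup.
	os.Setenv("PATH", tmp+string(os.PathListSeparator)+os.Getenv("PATH"))

	out, _ := exec.Command("terraform", "version").CombinedOutput()
	fmt.Print(string(out)) // Terraform v0.11.10
}
```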
defer tempSetEnv(t, "PATH", tmp)() - _, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true) + _, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true, projectCmdOutputHandler) ErrEquals(t, "terraform not found in $PATH. Set --default-tf-version or download terraform from https://www.terraform.io/downloads.html", err) } @@ -130,21 +144,27 @@ func TestNewClient_DefaultTFFlagInPath(t *testing.T) { fakeBinOut := "Terraform v0.11.10\n" logger := logging.NewNoopLogger(t) tmp, binDir, cacheDir, cleanup := mkSubDirs(t) + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() + ctx := models.ProjectCommandContext{ + Log: logging.NewNoopLogger(t), + Workspace: "default", + RepoRelDir: ".", + } defer cleanup() // We're testing this by adding our own "fake" terraform binary to path that // outputs what would normally come from terraform version. - err := ioutil.WriteFile(filepath.Join(tmp, "terraform0.11.10"), []byte(fmt.Sprintf("#!/bin/sh\necho '%s'", fakeBinOut)), 0700) // #nosec G306 + err := os.WriteFile(filepath.Join(tmp, "terraform0.11.10"), []byte(fmt.Sprintf("#!/bin/sh\necho '%s'", fakeBinOut)), 0700) // #nosec G306 Ok(t, err) defer tempSetEnv(t, "PATH", fmt.Sprintf("%s:%s", tmp, os.Getenv("PATH")))() - c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true) + c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true, projectCmdOutputHandler) Ok(t, err) Ok(t, err) Equals(t, "0.11.10", c.DefaultVersion().String()) - output, err := c.RunCommandWithVersion(logger, tmp, nil, map[string]string{}, nil, "") + output, err := c.RunCommandWithVersion(ctx, tmp, []string{"terraform", "init"}, map[string]string{}, nil, "") Ok(t, err) Equals(t, fakeBinOut+"\n", output) } @@ -153,22 +173,27 @@ func TestNewClient_DefaultTFFlagInPath(t *testing.T) { // bin dir that we use it. func TestNewClient_DefaultTFFlagInBinDir(t *testing.T) { fakeBinOut := "Terraform v0.11.10\n" - logger := logging.NewNoopLogger(t) tmp, binDir, cacheDir, cleanup := mkSubDirs(t) + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() + ctx := models.ProjectCommandContext{ + Log: logging.NewNoopLogger(t), + Workspace: "default", + RepoRelDir: ".", + } defer cleanup() // Add our fake binary to {datadir}/bin/terraform{version}. 
- err := ioutil.WriteFile(filepath.Join(binDir, "terraform0.11.10"), []byte(fmt.Sprintf("#!/bin/sh\necho '%s'", fakeBinOut)), 0700) // #nosec G306 + err := os.WriteFile(filepath.Join(binDir, "terraform0.11.10"), []byte(fmt.Sprintf("#!/bin/sh\necho '%s'", fakeBinOut)), 0700) // #nosec G306 Ok(t, err) defer tempSetEnv(t, "PATH", fmt.Sprintf("%s:%s", tmp, os.Getenv("PATH")))() - c, err := terraform.NewClient(logging.NewNoopLogger(t), binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true) + c, err := terraform.NewClient(logging.NewNoopLogger(t), binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true, projectCmdOutputHandler) Ok(t, err) Ok(t, err) Equals(t, "0.11.10", c.DefaultVersion().String()) - output, err := c.RunCommandWithVersion(logger, tmp, nil, map[string]string{}, nil, "") + output, err := c.RunCommandWithVersion(ctx, tmp, []string{"terraform", "init"}, map[string]string{}, nil, "") Ok(t, err) Equals(t, fakeBinOut+"\n", output) } @@ -178,6 +203,12 @@ func TestNewClient_DefaultTFFlagDownload(t *testing.T) { RegisterMockTestingT(t) logger := logging.NewNoopLogger(t) tmp, binDir, cacheDir, cleanup := mkSubDirs(t) + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() + ctx := models.ProjectCommandContext{ + Log: logging.NewNoopLogger(t), + Workspace: "default", + RepoRelDir: ".", + } defer cleanup() // Set PATH to empty so there's no TF available. @@ -186,10 +217,10 @@ func TestNewClient_DefaultTFFlagDownload(t *testing.T) { mockDownloader := mocks.NewMockDownloader() When(mockDownloader.GetFile(AnyString(), AnyString())).Then(func(params []pegomock.Param) pegomock.ReturnValues { - err := ioutil.WriteFile(params[0].(string), []byte("#!/bin/sh\necho '\nTerraform v0.11.10\n'"), 0700) // #nosec G306 + err := os.WriteFile(params[0].(string), []byte("#!/bin/sh\necho '\nTerraform v0.11.10\n'"), 0700) // #nosec G306 return []pegomock.ReturnValue{err} }) - c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, "https://my-mirror.releases.mycompany.com", mockDownloader, true) + c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, "https://my-mirror.releases.mycompany.com", mockDownloader, true, projectCmdOutputHandler) Ok(t, err) Ok(t, err) @@ -204,7 +235,8 @@ func TestNewClient_DefaultTFFlagDownload(t *testing.T) { // Reset PATH so that it has sh. 
Ok(t, os.Setenv("PATH", orig)) - output, err := c.RunCommandWithVersion(logger, tmp, nil, map[string]string{}, nil, "") + + output, err := c.RunCommandWithVersion(ctx, tmp, []string{"terraform", "init"}, map[string]string{}, nil, "") Ok(t, err) Equals(t, "\nTerraform v0.11.10\n\n", output) } @@ -213,8 +245,9 @@ func TestNewClient_DefaultTFFlagDownload(t *testing.T) { func TestNewClient_BadVersion(t *testing.T) { logger := logging.NewNoopLogger(t) _, binDir, cacheDir, cleanup := mkSubDirs(t) + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() defer cleanup() - _, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "malformed", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true) + _, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "malformed", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true, projectCmdOutputHandler) ErrEquals(t, "Malformed version: malformed", err) } @@ -223,6 +256,12 @@ func TestRunCommandWithVersion_DLsTF(t *testing.T) { logger := logging.NewNoopLogger(t) RegisterMockTestingT(t) tmp, binDir, cacheDir, cleanup := mkSubDirs(t) + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() + ctx := models.ProjectCommandContext{ + Log: logging.NewNoopLogger(t), + Workspace: "default", + RepoRelDir: ".", + } defer cleanup() mockDownloader := mocks.NewMockDownloader() @@ -234,17 +273,19 @@ func TestRunCommandWithVersion_DLsTF(t *testing.T) { runtime.GOARCH, baseURL) When(mockDownloader.GetFile(filepath.Join(tmp, "bin", "terraform99.99.99"), expURL)).Then(func(params []pegomock.Param) pegomock.ReturnValues { - err := ioutil.WriteFile(params[0].(string), []byte("#!/bin/sh\necho '\nTerraform v99.99.99\n'"), 0700) // #nosec G306 + err := os.WriteFile(params[0].(string), []byte("#!/bin/sh\necho '\nTerraform v99.99.99\n'"), 0700) // #nosec G306 return []pegomock.ReturnValue{err} }) - c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, mockDownloader, true) + c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, mockDownloader, true, projectCmdOutputHandler) Ok(t, err) Equals(t, "0.11.10", c.DefaultVersion().String()) v, err := version.NewVersion("99.99.99") Ok(t, err) - output, err := c.RunCommandWithVersion(logger, tmp, nil, map[string]string{}, v, "") + + output, err := c.RunCommandWithVersion(ctx, tmp, []string{"terraform", "init"}, map[string]string{}, v, "") + Assert(t, err == nil, "err: %s: %s", err, output) Equals(t, "\nTerraform v99.99.99\n\n", output) } @@ -254,11 +295,12 @@ func TestEnsureVersion_downloaded(t *testing.T) { logger := logging.NewNoopLogger(t) RegisterMockTestingT(t) tmp, binDir, cacheDir, cleanup := mkSubDirs(t) + projectCmdOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() defer cleanup() mockDownloader := mocks.NewMockDownloader() - c, err := terraform.NewTestClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, mockDownloader, true) + c, err := terraform.NewTestClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, mockDownloader, true, projectCmdOutputHandler) Ok(t, err) Equals(t, "0.11.10", c.DefaultVersion().String()) diff --git a/server/events/apply_command_runner.go b/server/events/apply_command_runner.go index 6cd23c213a..e0c1ac2f87 100644 --- a/server/events/apply_command_runner.go +++ 
b/server/events/apply_command_runner.go @@ -21,6 +21,7 @@ func NewApplyCommandRunner( parallelPoolSize int, SilenceNoProjects bool, silenceVCSStatusNoProjects bool, + pullReqStatusFetcher vcs.PullReqStatusFetcher, ) *ApplyCommandRunner { return &ApplyCommandRunner{ vcsClient: vcsClient, @@ -36,21 +37,23 @@ func NewApplyCommandRunner( parallelPoolSize: parallelPoolSize, SilenceNoProjects: SilenceNoProjects, silenceVCSStatusNoProjects: silenceVCSStatusNoProjects, + pullReqStatusFetcher: pullReqStatusFetcher, } } type ApplyCommandRunner struct { - DisableApplyAll bool - DB *db.BoltDB - locker locking.ApplyLockChecker - vcsClient vcs.Client - commitStatusUpdater CommitStatusUpdater - prjCmdBuilder ProjectApplyCommandBuilder - prjCmdRunner ProjectApplyCommandRunner - autoMerger *AutoMerger - pullUpdater *PullUpdater - dbUpdater *DBUpdater - parallelPoolSize int + DisableApplyAll bool + DB *db.BoltDB + locker locking.ApplyLockChecker + vcsClient vcs.Client + commitStatusUpdater CommitStatusUpdater + prjCmdBuilder ProjectApplyCommandBuilder + prjCmdRunner ProjectApplyCommandRunner + autoMerger *AutoMerger + pullUpdater *PullUpdater + dbUpdater *DBUpdater + parallelPoolSize int + pullReqStatusFetcher vcs.PullReqStatusFetcher // SilenceNoProjects is whether Atlantis should respond to PRs if no projects // are found SilenceNoProjects bool @@ -98,17 +101,16 @@ func (a *ApplyCommandRunner) Run(ctx *CommandContext, cmd *CommentCommand) { // We do this here because when we set a "Pending" status, if users have // required the Atlantis status checks to pass, then we've now changed // the mergeability status of the pull request. - ctx.PullMergeable, err = a.vcsClient.PullIsMergeable(baseRepo, pull) + // This sets the approved, mergeable, and sqlocked status in the context. + ctx.PullRequestStatus, err = a.pullReqStatusFetcher.FetchPullStatus(baseRepo, pull) if err != nil { // On error we continue the request with mergeable assumed false. // We want to continue because not all apply's will need this status, // only if they rely on the mergeability requirement. - ctx.PullMergeable = false - ctx.Log.Warn("unable to get mergeable status: %s. Continuing with mergeable assumed false", err) + // All PullRequestStatus fields are set to false by default when error. + ctx.Log.Warn("unable to get pull request status: %s. 
Continuing with mergeable and approved assumed false", err) } - ctx.Log.Info("pull request mergeable status: %t", ctx.PullMergeable) - var projectCmds []models.ProjectCommandContext projectCmds, err = a.prjCmdBuilder.BuildApplyCommands(ctx, cmd) diff --git a/server/events/apply_requirement_handler.go b/server/events/apply_requirement_handler.go index bd0c8051f2..8ca844a88d 100644 --- a/server/events/apply_requirement_handler.go +++ b/server/events/apply_requirement_handler.go @@ -1,8 +1,6 @@ package events import ( - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/core/runtime" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/yaml/raw" "github.com/runatlantis/atlantis/server/events/yaml/valid" @@ -14,20 +12,14 @@ type ApplyRequirement interface { } type AggregateApplyRequirements struct { - PullApprovedChecker runtime.PullApprovedChecker - WorkingDir WorkingDir + WorkingDir WorkingDir } func (a *AggregateApplyRequirements) ValidateProject(repoDir string, ctx models.ProjectCommandContext) (failure string, err error) { - for _, req := range ctx.ApplyRequirements { switch req { case raw.ApprovedApplyRequirement: - approved, err := a.PullApprovedChecker.PullIsApproved(ctx.Pull.BaseRepo, ctx.Pull) // nolint: vetshadow - if err != nil { - return "", errors.Wrap(err, "checking if pull request was approved") - } - if !approved { + if !ctx.PullReqStatus.ApprovalStatus.IsApproved { return "Pull request must be approved by at least one person other than the author before running apply.", nil } // this should come before mergeability check since mergeability is a superset of this check. @@ -36,7 +28,7 @@ func (a *AggregateApplyRequirements) ValidateProject(repoDir string, ctx models. return "All policies must pass for project before running apply", nil } case raw.MergeableApplyRequirement: - if !ctx.PullMergeable { + if !ctx.PullReqStatus.Mergeable { return "Pull request must be mergeable before running apply.", nil } case raw.UnDivergedApplyRequirement: diff --git a/server/events/command_context.go b/server/events/command_context.go index a4d6ebb13a..fcea22dfed 100644 --- a/server/events/command_context.go +++ b/server/events/command_context.go @@ -40,11 +40,9 @@ type CommandContext struct { // User is the user that triggered this command. User models.User Log logging.SimpleLogging - // PullMergeable is true if Pull is able to be merged. This is available in - // the CommandContext because we want to collect this information before we - // set our own build statuses which can affect mergeability if users have - // required the Atlantis status to be successful prior to merging. - PullMergeable bool + + // Current PR state + PullRequestStatus models.PullReqStatus PullStatus *models.PullStatus diff --git a/server/events/command_runner.go b/server/events/command_runner.go index cb15e75eb7..106e392061 100644 --- a/server/events/command_runner.go +++ b/server/events/command_runner.go @@ -116,6 +116,7 @@ type DefaultCommandRunner struct { Drainer *Drainer PreWorkflowHooksCommandRunner PreWorkflowHooksCommandRunner PullStatusFetcher PullStatusFetcher + TeamAllowlistChecker *TeamAllowlistChecker } // RunAutoplanCommand runs plan and policy_checks when a pull request is opened or updated. @@ -162,6 +163,32 @@ func (c *DefaultCommandRunner) RunAutoplanCommand(baseRepo models.Repo, headRepo autoPlanRunner.Run(ctx, nil) } +// commentUserDoesNotHavePermissions comments on the pull request that the user +// is not allowed to execute the command. 
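With the change to apply_requirement_handler.go above, the requirement checks no longer call the VCS host themselves; they read the approval and mergeability flags that ApplyCommandRunner fetched once through the PullReqStatusFetcher. A self-contained sketch of that shape, with local stand-ins for the Atlantis types:

```go
package main

import "fmt"

// Stand-ins for models.ApprovalStatus and models.PullReqStatus.
type approvalStatus struct{ IsApproved bool }

type pullReqStatus struct {
	ApprovalStatus approvalStatus
	Mergeable      bool
}

// validate mirrors AggregateApplyRequirements.ValidateProject: return the
// failure message for the first unmet requirement, or "" if all pass.
func validate(requirements []string, status pullReqStatus) string {
	for _, req := range requirements {
		switch req {
		case "approved":
			if !status.ApprovalStatus.IsApproved {
				return "Pull request must be approved before running apply."
			}
		case "mergeable":
			if !status.Mergeable {
				return "Pull request must be mergeable before running apply."
			}
		}
	}
	return ""
}

func main() {
	status := pullReqStatus{ApprovalStatus: approvalStatus{IsApproved: true}, Mergeable: false}
	fmt.Println(validate([]string{"approved", "mergeable"}, status))
}
```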
+func (c *DefaultCommandRunner) commentUserDoesNotHavePermissions(baseRepo models.Repo, pullNum int, user models.User, cmd *CommentCommand) { + errMsg := fmt.Sprintf("```\nError: User @%s does not have permissions to execute '%s' command.\n```", user.Username, cmd.Name.String()) + if err := c.VCSClient.CreateComment(baseRepo, pullNum, errMsg, ""); err != nil { + c.Logger.Err("unable to comment on pull request: %s", err) + } +} + +// checkUserPermissions checks if the user has permissions to execute the command +func (c *DefaultCommandRunner) checkUserPermissions(repo models.Repo, user models.User, cmd *CommentCommand) (bool, error) { + if c.TeamAllowlistChecker == nil || len(c.TeamAllowlistChecker.rules) == 0 { + // allowlist restriction is not enabled + return true, nil + } + teams, err := c.VCSClient.GetTeamNamesForUser(repo, user) + if err != nil { + return false, err + } + ok := c.TeamAllowlistChecker.IsCommandAllowedForAnyTeam(teams, cmd.Name.String()) + if !ok { + return false, nil + } + return true, nil +} + // RunCommentCommand executes the command. // We take in a pointer for maybeHeadRepo because for some events there isn't // enough data to construct the Repo model and callers might want to wait until @@ -179,6 +206,17 @@ func (c *DefaultCommandRunner) RunCommentCommand(baseRepo models.Repo, maybeHead log := c.buildLogger(baseRepo.FullName, pullNum) defer c.logPanics(baseRepo, pullNum, log) + // Check if the user who commented has the permissions to execute the 'plan' or 'apply' commands + ok, err := c.checkUserPermissions(baseRepo, user, cmd) + if err != nil { + c.Logger.Err("Unable to check user permissions: %s", err) + return + } + if !ok { + c.commentUserDoesNotHavePermissions(baseRepo, pullNum, user, cmd) + return + } + headRepo, pull, err := c.ensureValidRepoMetadata(baseRepo, maybeHeadRepo, maybePull, user, pullNum, log) if err != nil { return diff --git a/server/events/command_runner_test.go b/server/events/command_runner_test.go index 0184d226e2..26abd4e9e8 100644 --- a/server/events/command_runner_test.go +++ b/server/events/command_runner_test.go @@ -21,6 +21,7 @@ import ( "testing" "github.com/runatlantis/atlantis/server/core/db" + "github.com/runatlantis/atlantis/server/events/vcs" "github.com/runatlantis/atlantis/server/events/yaml/valid" "github.com/runatlantis/atlantis/server/logging" @@ -77,7 +78,6 @@ func setup(t *testing.T) *vcsmocks.MockClient { workingDir = mocks.NewMockWorkingDir() pendingPlanFinder = mocks.NewMockPendingPlanFinder() commitUpdater = mocks.NewMockCommitStatusUpdater() - tmp, cleanup := TempDir(t) defer cleanup() defaultBoltDB, err := db.New(tmp) @@ -131,6 +131,8 @@ func setup(t *testing.T) *vcsmocks.MockClient { defaultBoltDB, ) + pullReqStatusFetcher := vcs.NewPullReqStatusFetcher(vcsClient) + applyCommandRunner = events.NewApplyCommandRunner( vcsClient, false, @@ -145,6 +147,7 @@ func setup(t *testing.T) *vcsmocks.MockClient { parallelPoolSize, SilenceNoProjects, false, + pullReqStatusFetcher, ) approvePoliciesCommandRunner = events.NewApprovePoliciesCommandRunner( diff --git a/server/events/comment_parser.go b/server/events/comment_parser.go index 395516895c..79f13e89dc 100644 --- a/server/events/comment_parser.go +++ b/server/events/comment_parser.go @@ -16,7 +16,7 @@ package events import ( "bytes" "fmt" - "io/ioutil" + "io" "net/url" "path/filepath" "regexp" @@ -97,16 +97,19 @@ type CommentParseResult struct { // Valid commands contain: // - The initial "executable" name, 'run' or 'atlantis' or '@GithubUser' // where GithubUser is the 
API user Atlantis is running as. -// - Then a command, either 'plan', 'apply', 'approve_policies', or 'help'. +// - Then a command: 'plan', 'apply', 'unlock', 'version, 'approve_policies', +// or 'help'. // - Then optional flags, then an optional separator '--' followed by optional // extra flags to be appended to the terraform plan/apply command. // // Examples: // - atlantis help -// - run plan +// - run apply // - @GithubUser plan -w staging // - atlantis plan -w staging -d dir --verbose // - atlantis plan --verbose -- -key=value -key2 value2 +// - atlantis unlock +// - atlantis version // - atlantis approve_policies // func (e *CommentParser) Parse(comment string, vcsHost models.VCSHostType) CommentParseResult { @@ -166,7 +169,7 @@ func (e *CommentParser) Parse(comment string, vcsHost models.VCSHostType) Commen return CommentParseResult{CommentResponse: e.HelpComment(e.ApplyDisabled)} } - // Need to have a plan, apply, approve_policy or unlock at this point. + // Need plan, apply, unlock, approve_policies, or version at this point. if !e.stringInSlice(command, []string{models.PlanCommand.String(), models.ApplyCommand.String(), models.UnlockCommand.String(), models.ApprovePoliciesCommand.String(), models.VersionCommand.String()}) { return CommentParseResult{CommentResponse: fmt.Sprintf("```\nError: unknown command %q.\nRun 'atlantis --help' for usage.\n```", command)} } @@ -183,7 +186,7 @@ func (e *CommentParser) Parse(comment string, vcsHost models.VCSHostType) Commen case models.PlanCommand.String(): name = models.PlanCommand flagSet = pflag.NewFlagSet(models.PlanCommand.String(), pflag.ContinueOnError) - flagSet.SetOutput(ioutil.Discard) + flagSet.SetOutput(io.Discard) flagSet.StringVarP(&workspace, workspaceFlagLong, workspaceFlagShort, "", "Switch to this Terraform workspace before planning.") flagSet.StringVarP(&dir, dirFlagLong, dirFlagShort, "", "Which directory to run plan in relative to root of repo, ex. 'child/dir'.") flagSet.StringVarP(&project, projectFlagLong, projectFlagShort, "", fmt.Sprintf("Which project to run plan for. Refers to the name of the project configured in %s. Cannot be used at same time as workspace or dir flags.", yaml.AtlantisYAMLFilename)) @@ -191,7 +194,7 @@ func (e *CommentParser) Parse(comment string, vcsHost models.VCSHostType) Commen case models.ApplyCommand.String(): name = models.ApplyCommand flagSet = pflag.NewFlagSet(models.ApplyCommand.String(), pflag.ContinueOnError) - flagSet.SetOutput(ioutil.Discard) + flagSet.SetOutput(io.Discard) flagSet.StringVarP(&workspace, workspaceFlagLong, workspaceFlagShort, "", "Apply the plan for this Terraform workspace.") flagSet.StringVarP(&dir, dirFlagLong, dirFlagShort, "", "Apply the plan for this directory, relative to root of repo, ex. 'child/dir'.") flagSet.StringVarP(&project, projectFlagLong, projectFlagShort, "", fmt.Sprintf("Apply the plan for this project. Refers to the name of the project configured in %s. 
Cannot be used at same time as workspace or dir flags.", yaml.AtlantisYAMLFilename)) @@ -200,12 +203,12 @@ func (e *CommentParser) Parse(comment string, vcsHost models.VCSHostType) Commen case models.ApprovePoliciesCommand.String(): name = models.ApprovePoliciesCommand flagSet = pflag.NewFlagSet(models.ApprovePoliciesCommand.String(), pflag.ContinueOnError) - flagSet.SetOutput(ioutil.Discard) + flagSet.SetOutput(io.Discard) flagSet.BoolVarP(&verbose, verboseFlagLong, verboseFlagShort, false, "Append Atlantis log to comment.") case models.UnlockCommand.String(): name = models.UnlockCommand flagSet = pflag.NewFlagSet(models.UnlockCommand.String(), pflag.ContinueOnError) - flagSet.SetOutput(ioutil.Discard) + flagSet.SetOutput(io.Discard) case models.VersionCommand.String(): name = models.VersionCommand flagSet = pflag.NewFlagSet(models.VersionCommand.String(), pflag.ContinueOnError) @@ -404,6 +407,8 @@ Commands: {{- end }} unlock Removes all atlantis locks and discards all plans for this PR. To unlock a specific plan you can use the Atlantis UI. + approve_policies + Approves all current policy checking failures for the PR. version Print the output of 'terraform version' help View help. diff --git a/server/events/comment_parser_test.go b/server/events/comment_parser_test.go index 5bf664714b..1331f420ba 100644 --- a/server/events/comment_parser_test.go +++ b/server/events/comment_parser_test.go @@ -729,6 +729,8 @@ Commands: To only apply a specific plan, use the -d, -w and -p flags. unlock Removes all atlantis locks and discards all plans for this PR. To unlock a specific plan you can use the Atlantis UI. + approve_policies + Approves all current policy checking failures for the PR. version Print the output of 'terraform version' help View help. @@ -756,6 +758,8 @@ Commands: To plan a specific project, use the -d, -w and -p flags. unlock Removes all atlantis locks and discards all plans for this PR. To unlock a specific plan you can use the Atlantis UI. + approve_policies + Approves all current policy checking failures for the PR. version Print the output of 'terraform version' help View help. diff --git a/server/events/commit_status_updater.go b/server/events/commit_status_updater.go index d5be3b0910..a851cdbadd 100644 --- a/server/events/commit_status_updater.go +++ b/server/events/commit_status_updater.go @@ -28,10 +28,10 @@ import ( type CommitStatusUpdater interface { // UpdateCombined updates the combined status of the head commit of pull. // A combined status represents all the projects modified in the pull. - UpdateCombined(repo models.Repo, pull models.PullRequest, status models.CommitStatus, command models.CommandName) error + UpdateCombined(repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName models.CommandName) error // UpdateCombinedCount updates the combined status to reflect the // numSuccess out of numTotal. - UpdateCombinedCount(repo models.Repo, pull models.PullRequest, status models.CommitStatus, command models.CommandName, numSuccess int, numTotal int) error + UpdateCombinedCount(repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName models.CommandName, numSuccess int, numTotal int) error // UpdateProject sets the commit status for the project represented by // ctx. UpdateProject(ctx models.ProjectCommandContext, cmdName models.CommandName, status models.CommitStatus, url string) error @@ -39,31 +39,21 @@ type CommitStatusUpdater interface { // DefaultCommitStatusUpdater implements CommitStatusUpdater. 
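The permission gate added to DefaultCommandRunner earlier in this diff (checkUserPermissions plus TeamAllowlistChecker) only restricts commands when allowlist rules are configured. A self-contained sketch of that behaviour with stand-in types; the rule layout here is illustrative, not the real TeamAllowlistChecker structure:

```go
package main

import "fmt"

// teamAllowlistChecker is a stand-in: a map of command name -> teams that
// may run it. The real checker lives in server/events.
type teamAllowlistChecker struct {
	rules map[string][]string
}

func (c *teamAllowlistChecker) hasRules() bool { return len(c.rules) > 0 }

func (c *teamAllowlistChecker) isCommandAllowedForAnyTeam(teams []string, cmd string) bool {
	for _, team := range teams {
		for _, allowed := range c.rules[cmd] {
			if allowed == team {
				return true
			}
		}
	}
	return false
}

// checkUserPermissions mirrors the gate above: no configured rules means the
// restriction is disabled and every user may run the command.
func checkUserPermissions(checker *teamAllowlistChecker, userTeams []string, cmd string) bool {
	if checker == nil || !checker.hasRules() {
		return true
	}
	return checker.isCommandAllowedForAnyTeam(userTeams, cmd)
}

func main() {
	checker := &teamAllowlistChecker{rules: map[string][]string{"apply": {"ops"}}}
	fmt.Println(checkUserPermissions(checker, []string{"dev"}, "apply")) // false
	fmt.Println(checkUserPermissions(nil, []string{"dev"}, "apply"))     // true
}
```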
type DefaultCommitStatusUpdater struct { - Client vcs.Client - // StatusName is the name used to identify Atlantis when creating PR statuses. - StatusName string + Client vcs.Client + TitleBuilder vcs.StatusTitleBuilder } func (d *DefaultCommitStatusUpdater) UpdateCombined(repo models.Repo, pull models.PullRequest, status models.CommitStatus, command models.CommandName) error { - src := fmt.Sprintf("%s/%s", d.StatusName, command.String()) - var descripWords string - switch status { - case models.PendingCommitStatus: - descripWords = "in progress..." - case models.FailedCommitStatus: - descripWords = "failed." - case models.SuccessCommitStatus: - descripWords = "succeeded." - } - descrip := fmt.Sprintf("%s %s", strings.Title(command.String()), descripWords) + src := d.TitleBuilder.Build(command.String()) + descrip := fmt.Sprintf("%s %s", strings.Title(command.String()), d.statusDescription(status)) return d.Client.UpdateStatus(repo, pull, status, src, descrip, "") } -func (d *DefaultCommitStatusUpdater) UpdateCombinedCount(repo models.Repo, pull models.PullRequest, status models.CommitStatus, command models.CommandName, numSuccess int, numTotal int) error { - src := fmt.Sprintf("%s/%s", d.StatusName, command.String()) +func (d *DefaultCommitStatusUpdater) UpdateCombinedCount(repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName models.CommandName, numSuccess int, numTotal int) error { + src := d.TitleBuilder.Build(cmdName.String()) cmdVerb := "unknown" - switch command { + switch cmdName { case models.PlanCommand: cmdVerb = "planned" case models.PolicyCheckCommand: @@ -80,7 +70,14 @@ func (d *DefaultCommitStatusUpdater) UpdateProject(ctx models.ProjectCommandCont if projectID == "" { projectID = fmt.Sprintf("%s/%s", ctx.RepoRelDir, ctx.Workspace) } - src := fmt.Sprintf("%s/%s: %s", d.StatusName, cmdName.String(), projectID) + src := d.TitleBuilder.Build(cmdName.String(), vcs.StatusTitleOptions{ + ProjectName: projectID, + }) + descrip := fmt.Sprintf("%s %s", strings.Title(cmdName.String()), d.statusDescription(status)) + return d.Client.UpdateStatus(ctx.BaseRepo, ctx.Pull, status, src, descrip, url) +} + +func (d *DefaultCommitStatusUpdater) statusDescription(status models.CommitStatus) string { var descripWords string switch status { case models.PendingCommitStatus: @@ -90,6 +87,6 @@ func (d *DefaultCommitStatusUpdater) UpdateProject(ctx models.ProjectCommandCont case models.SuccessCommitStatus: descripWords = "succeeded." } - descrip := fmt.Sprintf("%s %s", strings.Title(cmdName.String()), descripWords) - return d.Client.UpdateStatus(ctx.BaseRepo, ctx.Pull, status, src, descrip, url) + + return descripWords } diff --git a/server/events/commit_status_updater_test.go b/server/events/commit_status_updater_test.go index 582daf2586..aa2c658c06 100644 --- a/server/events/commit_status_updater_test.go +++ b/server/events/commit_status_updater_test.go @@ -20,6 +20,7 @@ import ( . "github.com/petergtz/pegomock" "github.com/runatlantis/atlantis/server/events" "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/events/vcs" "github.com/runatlantis/atlantis/server/events/vcs/mocks" . 
"github.com/runatlantis/atlantis/testing" ) @@ -66,7 +67,9 @@ func TestUpdateCombined(t *testing.T) { t.Run(c.expDescrip, func(t *testing.T) { RegisterMockTestingT(t) client := mocks.NewMockClient() - s := events.DefaultCommitStatusUpdater{Client: client, StatusName: "atlantis"} + + titleBuilder := vcs.StatusTitleBuilder{TitlePrefix: "atlantis"} + s := events.DefaultCommitStatusUpdater{Client: client, TitleBuilder: titleBuilder} err := s.UpdateCombined(models.Repo{}, models.PullRequest{}, c.status, c.command) Ok(t, err) @@ -132,11 +135,12 @@ func TestUpdateCombinedCount(t *testing.T) { t.Run(c.expDescrip, func(t *testing.T) { RegisterMockTestingT(t) client := mocks.NewMockClient() - s := events.DefaultCommitStatusUpdater{Client: client, StatusName: "atlantis-test"} + titleBuilder := vcs.StatusTitleBuilder{TitlePrefix: "atlantis-test"} + s := events.DefaultCommitStatusUpdater{Client: client, TitleBuilder: titleBuilder} err := s.UpdateCombinedCount(models.Repo{}, models.PullRequest{}, c.status, c.command, c.numSuccess, c.numTotal) Ok(t, err) - expSrc := fmt.Sprintf("%s/%s", s.StatusName, c.command) + expSrc := fmt.Sprintf("%s/%s", titleBuilder.TitlePrefix, c.command) client.VerifyWasCalledOnce().UpdateStatus(models.Repo{}, models.PullRequest{}, c.status, expSrc, c.expDescrip, "") }) } @@ -169,7 +173,8 @@ func TestDefaultCommitStatusUpdater_UpdateProjectSrc(t *testing.T) { for _, c := range cases { t.Run(c.expSrc, func(t *testing.T) { client := mocks.NewMockClient() - s := events.DefaultCommitStatusUpdater{Client: client, StatusName: "atlantis"} + titleBuilder := vcs.StatusTitleBuilder{TitlePrefix: "atlantis"} + s := events.DefaultCommitStatusUpdater{Client: client, TitleBuilder: titleBuilder} err := s.UpdateProject(models.ProjectCommandContext{ ProjectName: c.projectName, RepoRelDir: c.repoRelDir, @@ -227,7 +232,8 @@ func TestDefaultCommitStatusUpdater_UpdateProject(t *testing.T) { for _, c := range cases { t.Run(c.expDescrip, func(t *testing.T) { client := mocks.NewMockClient() - s := events.DefaultCommitStatusUpdater{Client: client, StatusName: "atlantis"} + titleBuilder := vcs.StatusTitleBuilder{TitlePrefix: "atlantis"} + s := events.DefaultCommitStatusUpdater{Client: client, TitleBuilder: titleBuilder} err := s.UpdateProject(models.ProjectCommandContext{ RepoRelDir: ".", Workspace: "default", @@ -245,7 +251,8 @@ func TestDefaultCommitStatusUpdater_UpdateProject(t *testing.T) { func TestDefaultCommitStatusUpdater_UpdateProjectCustomStatusName(t *testing.T) { RegisterMockTestingT(t) client := mocks.NewMockClient() - s := events.DefaultCommitStatusUpdater{Client: client, StatusName: "custom"} + titleBuilder := vcs.StatusTitleBuilder{TitlePrefix: "custom"} + s := events.DefaultCommitStatusUpdater{Client: client, TitleBuilder: titleBuilder} err := s.UpdateProject(models.ProjectCommandContext{ RepoRelDir: ".", Workspace: "default", diff --git a/server/events/event_parser.go b/server/events/event_parser.go index d427a4202a..0f487d17dd 100644 --- a/server/events/event_parser.go +++ b/server/events/event_parser.go @@ -636,7 +636,9 @@ func (e *EventParser) ParseGitlabMergeRequest(mr *gitlab.MergeRequest, baseRepo // event given the Bitbucket Server header. func (e *EventParser) GetBitbucketServerPullEventType(eventTypeHeader string) models.PullRequestEventType { switch eventTypeHeader { - case bitbucketserver.PullCreatedHeader: + // PullFromRefUpdatedHeader event occurs on OPEN state pull request + // so no additional checks are needed. 
+ case bitbucketserver.PullCreatedHeader, bitbucketserver.PullFromRefUpdatedHeader: return models.OpenedPullEvent case bitbucketserver.PullMergedHeader, bitbucketserver.PullDeclinedHeader, bitbucketserver.PullDeletedHeader: return models.ClosedPullEvent @@ -786,6 +788,7 @@ func (e *EventParser) ParseAzureDevopsPull(pull *azuredevops.GitPullRequest) (pu err = errors.New("url is null") return } + headBranch := pull.GetSourceRefName() if headBranch == "" { err = errors.New("sourceRefName (branch name) is null") @@ -851,19 +854,22 @@ func (e *EventParser) ParseAzureDevopsRepo(adRepo *azuredevops.GitRepository) (m teamProject := adRepo.GetProject() parent := adRepo.GetParentRepository() owner := "" + + uri, err := url.Parse(adRepo.GetWebURL()) + if err != nil { + return models.Repo{}, err + } + if parent != nil { owner = parent.GetName() } else { - uri, err := url.Parse(adRepo.GetWebURL()) - if err != nil { - return models.Repo{}, err - } + if strings.Contains(uri.Host, "visualstudio.com") { owner = strings.Split(uri.Host, ".")[0] } else if strings.Contains(uri.Host, "dev.azure.com") { owner = strings.Split(uri.Path, "/")[1] } else { - owner = "" + owner = strings.Split(uri.Path, "/")[1] // to support owner for self hosted } } @@ -872,7 +878,14 @@ func (e *EventParser) ParseAzureDevopsRepo(adRepo *azuredevops.GitRepository) (m // https://docs.microsoft.com/en-us/azure/devops/release-notes/2018/sep-10-azure-devops-launch#switch-existing-organizations-to-use-the-new-domain-name-url project := teamProject.GetName() repo := adRepo.GetName() - cloneURL := fmt.Sprintf("https://dev.azure.com/%s/%s/_git/%s", owner, project, repo) + + host := uri.Host + if host == "" { + host = "dev.azure.com" + } + + cloneURL := fmt.Sprintf("https://%s/%s/%s/_git/%s", host, owner, project, repo) + fmt.Println("%", cloneURL) fullName := fmt.Sprintf("%s/%s/%s", owner, project, repo) return models.NewRepo(models.AzureDevops, fullName, cloneURL, e.AzureDevopsUser, e.AzureDevopsToken) } diff --git a/server/events/event_parser_test.go b/server/events/event_parser_test.go index 1922de084f..47a93e5bac 100644 --- a/server/events/event_parser_test.go +++ b/server/events/event_parser_test.go @@ -16,7 +16,7 @@ package events_test import ( "encoding/json" "fmt" - "io/ioutil" + "os" "path/filepath" "strings" "testing" @@ -325,7 +325,7 @@ func TestParseGithubPull(t *testing.T) { func TestParseGitlabMergeEvent(t *testing.T) { t.Log("should properly parse a gitlab merge event") path := filepath.Join("testdata", "gitlab-merge-request-event.json") - bytes, err := ioutil.ReadFile(path) + bytes, err := os.ReadFile(path) Ok(t, err) var event *gitlab.MergeEvent err = json.Unmarshal(bytes, &event) @@ -382,7 +382,7 @@ func TestParseGitlabMergeEvent(t *testing.T) { // i.e. instead of under an owner/repo it's under an owner/group/subgroup/repo. 
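ParseAzureDevopsRepo above now derives the clone URL host from the repository's web URL, falling back to dev.azure.com, so self-hosted Azure DevOps Server instances are no longer forced onto the public host. A self-contained sketch of that host/owner derivation; the self-hosted hostname is the one used by the new tests below, and the dev.azure.com and self-hosted branches are collapsed here because both read the owner from the first path segment:

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

// deriveHostAndOwner mirrors the logic added to ParseAzureDevopsRepo: prefer
// the web URL's host and read the owner from the host or path depending on
// the Azure DevOps flavor.
func deriveHostAndOwner(webURL string) (host, owner string, err error) {
	uri, err := url.Parse(webURL)
	if err != nil {
		return "", "", err
	}
	host = uri.Host
	if host == "" {
		host = "dev.azure.com"
	}
	if strings.Contains(uri.Host, "visualstudio.com") {
		// Legacy {owner}.visualstudio.com URLs keep the owner in the host.
		owner = strings.Split(uri.Host, ".")[0]
	} else {
		// dev.azure.com and self-hosted servers keep it as the first path segment.
		owner = strings.Split(uri.Path, "/")[1]
	}
	return host, owner, nil
}

func main() {
	for _, u := range []string{
		"https://dev.azure.com/owner/project/_git/repo",
		"https://devops.abc.com/owner/project/_git/repo", // self-hosted, as in the tests
	} {
		host, owner, _ := deriveHostAndOwner(u)
		fmt.Printf("https://%s/%s/project/_git/repo\n", host, owner)
	}
}
```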
func TestParseGitlabMergeEvent_Subgroup(t *testing.T) { path := filepath.Join("testdata", "gitlab-merge-request-event-subgroup.json") - bytes, err := ioutil.ReadFile(path) + bytes, err := os.ReadFile(path) Ok(t, err) var event *gitlab.MergeEvent err = json.Unmarshal(bytes, &event) @@ -457,7 +457,7 @@ func TestParseGitlabMergeEvent_ActionType(t *testing.T) { } path := filepath.Join("testdata", "gitlab-merge-request-event.json") - bytes, err := ioutil.ReadFile(path) + bytes, err := os.ReadFile(path) Ok(t, err) mergeEventJSON := string(bytes) @@ -479,7 +479,7 @@ func TestParseGitlabMergeEvent_ActionType(t *testing.T) { func TestParseGitlabMergeRequest(t *testing.T) { t.Log("should properly parse a gitlab merge request") path := filepath.Join("testdata", "gitlab-get-merge-request.json") - bytes, err := ioutil.ReadFile(path) + bytes, err := os.ReadFile(path) if err != nil { Ok(t, err) } @@ -517,7 +517,7 @@ func TestParseGitlabMergeRequest(t *testing.T) { func TestParseGitlabMergeRequest_Subgroup(t *testing.T) { path := filepath.Join("testdata", "gitlab-get-merge-request-subgroup.json") - bytes, err := ioutil.ReadFile(path) + bytes, err := os.ReadFile(path) if err != nil { Ok(t, err) } @@ -552,7 +552,7 @@ func TestParseGitlabMergeRequest_Subgroup(t *testing.T) { func TestParseGitlabMergeCommentEvent(t *testing.T) { t.Log("should properly parse a gitlab merge comment event") path := filepath.Join("testdata", "gitlab-merge-request-comment-event.json") - bytes, err := ioutil.ReadFile(path) + bytes, err := os.ReadFile(path) Ok(t, err) var event *gitlab.MergeCommentEvent err = json.Unmarshal(bytes, &event) @@ -589,7 +589,7 @@ func TestParseGitlabMergeCommentEvent(t *testing.T) { // Should properly parse a gitlab merge comment event from a subgroup repo. func TestParseGitlabMergeCommentEvent_Subgroup(t *testing.T) { path := filepath.Join("testdata", "gitlab-merge-request-comment-event-subgroup.json") - bytes, err := ioutil.ReadFile(path) + bytes, err := os.ReadFile(path) Ok(t, err) var event *gitlab.MergeCommentEvent err = json.Unmarshal(bytes, &event) @@ -738,7 +738,7 @@ func TestParseBitbucketCloudCommentEvent_EmptyObject(t *testing.T) { func TestParseBitbucketCloudCommentEvent_CommitHashMissing(t *testing.T) { path := filepath.Join("testdata", "bitbucket-cloud-comment-event.json") - bytes, err := ioutil.ReadFile(path) + bytes, err := os.ReadFile(path) Ok(t, err) emptyCommitHash := strings.Replace(string(bytes), ` "hash": "e0624da46d3a",`, "", -1) _, _, _, _, _, err = parser.ParseBitbucketCloudPullCommentEvent([]byte(emptyCommitHash)) @@ -747,7 +747,7 @@ func TestParseBitbucketCloudCommentEvent_CommitHashMissing(t *testing.T) { func TestParseBitbucketCloudCommentEvent_ValidEvent(t *testing.T) { path := filepath.Join("testdata", "bitbucket-cloud-comment-event.json") - bytes, err := ioutil.ReadFile(path) + bytes, err := os.ReadFile(path) Ok(t, err) pull, baseRepo, headRepo, user, comment, err := parser.ParseBitbucketCloudPullCommentEvent(bytes) Ok(t, err) @@ -792,7 +792,7 @@ func TestParseBitbucketCloudCommentEvent_ValidEvent(t *testing.T) { func TestParseBitbucketCloudCommentEvent_MultipleStates(t *testing.T) { path := filepath.Join("testdata", "bitbucket-cloud-comment-event.json") - bytes, err := ioutil.ReadFile(path) + bytes, err := os.ReadFile(path) if err != nil { Ok(t, err) } @@ -831,7 +831,7 @@ func TestParseBitbucketCloudCommentEvent_MultipleStates(t *testing.T) { func TestParseBitbucketCloudPullEvent_ValidEvent(t *testing.T) { path := filepath.Join("testdata", 
"bitbucket-cloud-pull-event-created.json") - bytes, err := ioutil.ReadFile(path) + bytes, err := os.ReadFile(path) if err != nil { Ok(t, err) } @@ -894,7 +894,7 @@ func TestParseBitbucketCloudPullEvent_States(t *testing.T) { }, } { path := filepath.Join("testdata", c.JSON) - bytes, err := ioutil.ReadFile(path) + bytes, err := os.ReadFile(path) if err != nil { Ok(t, err) } @@ -950,7 +950,7 @@ func TestParseBitbucketServerCommentEvent_EmptyObject(t *testing.T) { func TestParseBitbucketServerCommentEvent_CommitHashMissing(t *testing.T) { path := filepath.Join("testdata", "bitbucket-server-comment-event.json") - bytes, err := ioutil.ReadFile(path) + bytes, err := os.ReadFile(path) if err != nil { Ok(t, err) } @@ -961,7 +961,7 @@ func TestParseBitbucketServerCommentEvent_CommitHashMissing(t *testing.T) { func TestParseBitbucketServerCommentEvent_ValidEvent(t *testing.T) { path := filepath.Join("testdata", "bitbucket-server-comment-event.json") - bytes, err := ioutil.ReadFile(path) + bytes, err := os.ReadFile(path) if err != nil { Ok(t, err) } @@ -1008,7 +1008,7 @@ func TestParseBitbucketServerCommentEvent_ValidEvent(t *testing.T) { func TestParseBitbucketServerCommentEvent_MultipleStates(t *testing.T) { path := filepath.Join("testdata", "bitbucket-server-comment-event.json") - bytes, err := ioutil.ReadFile(path) + bytes, err := os.ReadFile(path) if err != nil { Ok(t, err) } @@ -1043,7 +1043,7 @@ func TestParseBitbucketServerCommentEvent_MultipleStates(t *testing.T) { func TestParseBitbucketServerPullEvent_ValidEvent(t *testing.T) { path := filepath.Join("testdata", "bitbucket-server-pull-event-merged.json") - bytes, err := ioutil.ReadFile(path) + bytes, err := os.ReadFile(path) if err != nil { Ok(t, err) } @@ -1313,3 +1313,179 @@ func TestParseAzureDevopsPull(t *testing.T) { Equals(t, expBaseRepo, actBaseRepo) Equals(t, expBaseRepo, actHeadRepo) } + +func TestParseAzureDevopsSelfHostedRepo(t *testing.T) { + // this should be successful + repo := ADSelfRepo + repo.ParentRepository = nil + r, err := parser.ParseAzureDevopsRepo(&repo) + Ok(t, err) + Equals(t, models.Repo{ + Owner: "owner/project", + FullName: "owner/project/repo", + CloneURL: "https://azuredevops-user:azuredevops-token@devops.abc.com/owner/project/_git/repo", + SanitizedCloneURL: "https://azuredevops-user:@devops.abc.com/owner/project/_git/repo", + Name: "repo", + VCSHost: models.VCSHost{ + Hostname: "devops.abc.com", + Type: models.AzureDevops, + }, + }, r) + +} + +func TestParseAzureDevopsSelfHostedPullEvent(t *testing.T) { + _, _, _, _, _, err := parser.ParseAzureDevopsPullEvent(ADSelfPullEvent) + Ok(t, err) + + testPull := deepcopy.Copy(ADSelfPull).(azuredevops.GitPullRequest) + testPull.LastMergeSourceCommit.CommitID = nil + _, _, _, err = parser.ParseAzureDevopsPull(&testPull) + ErrEquals(t, "lastMergeSourceCommit.commitID is null", err) + + testPull = deepcopy.Copy(ADSelfPull).(azuredevops.GitPullRequest) + testPull.URL = nil + _, _, _, err = parser.ParseAzureDevopsPull(&testPull) + ErrEquals(t, "url is null", err) + testEvent := deepcopy.Copy(ADSelfPullEvent).(azuredevops.Event) + resource := deepcopy.Copy(testEvent.Resource).(*azuredevops.GitPullRequest) + resource.CreatedBy = nil + testEvent.Resource = resource + _, _, _, _, _, err = parser.ParseAzureDevopsPullEvent(testEvent) + ErrEquals(t, "CreatedBy is null", err) + + testEvent = deepcopy.Copy(ADSelfPullEvent).(azuredevops.Event) + resource = deepcopy.Copy(testEvent.Resource).(*azuredevops.GitPullRequest) + resource.CreatedBy.UniqueName = azuredevops.String("") + 
testEvent.Resource = resource + _, _, _, _, _, err = parser.ParseAzureDevopsPullEvent(testEvent) + ErrEquals(t, "CreatedBy.UniqueName is null", err) + + actPull, evType, actBaseRepo, actHeadRepo, actUser, err := parser.ParseAzureDevopsPullEvent(ADSelfPullEvent) + Ok(t, err) + expBaseRepo := models.Repo{ + Owner: "owner/project", + FullName: "owner/project/repo", + CloneURL: "https://azuredevops-user:azuredevops-token@devops.abc.com/owner/project/_git/repo", + SanitizedCloneURL: "https://azuredevops-user:@devops.abc.com/owner/project/_git/repo", + Name: "repo", + VCSHost: models.VCSHost{ + Hostname: "devops.abc.com", + Type: models.AzureDevops, + }, + } + Equals(t, expBaseRepo, actBaseRepo) + Equals(t, expBaseRepo, actHeadRepo) + Equals(t, models.PullRequest{ + URL: ADSelfPull.GetURL(), + Author: ADSelfPull.CreatedBy.GetUniqueName(), + HeadBranch: "feature/sourceBranch", + BaseBranch: "targetBranch", + HeadCommit: ADSelfPull.LastMergeSourceCommit.GetCommitID(), + Num: ADSelfPull.GetPullRequestID(), + State: models.OpenPullState, + BaseRepo: expBaseRepo, + }, actPull) + Equals(t, models.OpenedPullEvent, evType) + Equals(t, models.User{Username: "user@example.com"}, actUser) +} + +func TestParseAzureDevopsSelfHostedPullEvent_EventType(t *testing.T) { + cases := []struct { + action string + exp models.PullRequestEventType + }{ + { + action: "git.pullrequest.updated", + exp: models.UpdatedPullEvent, + }, + { + action: "git.pullrequest.created", + exp: models.OpenedPullEvent, + }, + { + action: "git.pullrequest.updated", + exp: models.ClosedPullEvent, + }, + { + action: "anything_else", + exp: models.OtherPullEvent, + }, + } + + for _, c := range cases { + t.Run(c.action, func(t *testing.T) { + event := deepcopy.Copy(ADSelfPullEvent).(azuredevops.Event) + if c.exp == models.ClosedPullEvent { + event = deepcopy.Copy(ADSelfPullClosedEvent).(azuredevops.Event) + } + event.EventType = c.action + _, actType, _, _, _, err := parser.ParseAzureDevopsPullEvent(event) + Ok(t, err) + Equals(t, c.exp, actType) + }) + } +} + +func TestParseAzureSelfHostedDevopsPull(t *testing.T) { + testPull := deepcopy.Copy(ADSelfPull).(azuredevops.GitPullRequest) + testPull.LastMergeSourceCommit.CommitID = nil + _, _, _, err := parser.ParseAzureDevopsPull(&testPull) + ErrEquals(t, "lastMergeSourceCommit.commitID is null", err) + + testPull = deepcopy.Copy(ADSelfPull).(azuredevops.GitPullRequest) + testPull.URL = nil + _, _, _, err = parser.ParseAzureDevopsPull(&testPull) + ErrEquals(t, "url is null", err) + + testPull = deepcopy.Copy(ADSelfPull).(azuredevops.GitPullRequest) + testPull.SourceRefName = nil + _, _, _, err = parser.ParseAzureDevopsPull(&testPull) + ErrEquals(t, "sourceRefName (branch name) is null", err) + + testPull = deepcopy.Copy(ADSelfPull).(azuredevops.GitPullRequest) + testPull.TargetRefName = nil + _, _, _, err = parser.ParseAzureDevopsPull(&testPull) + ErrEquals(t, "targetRefName (branch name) is null", err) + + testPull = deepcopy.Copy(ADSelfPull).(azuredevops.GitPullRequest) + testPull.CreatedBy = nil + _, _, _, err = parser.ParseAzureDevopsPull(&testPull) + ErrEquals(t, "CreatedBy is null", err) + + testPull = deepcopy.Copy(ADSelfPull).(azuredevops.GitPullRequest) + testPull.CreatedBy.UniqueName = nil + _, _, _, err = parser.ParseAzureDevopsPull(&testPull) + ErrEquals(t, "CreatedBy.UniqueName is null", err) + + testPull = deepcopy.Copy(ADSelfPull).(azuredevops.GitPullRequest) + testPull.PullRequestID = nil + _, _, _, err = parser.ParseAzureDevopsPull(&testPull) + ErrEquals(t, "pullRequestId is 
null", err) + + actPull, actBaseRepo, actHeadRepo, err := parser.ParseAzureDevopsPull(&ADSelfPull) + Ok(t, err) + expBaseRepo := models.Repo{ + Owner: "owner/project", + FullName: "owner/project/repo", + CloneURL: "https://azuredevops-user:azuredevops-token@devops.abc.com/owner/project/_git/repo", + SanitizedCloneURL: "https://azuredevops-user:@devops.abc.com/owner/project/_git/repo", + Name: "repo", + VCSHost: models.VCSHost{ + Hostname: "devops.abc.com", + Type: models.AzureDevops, + }, + } + Equals(t, models.PullRequest{ + URL: ADSelfPull.GetURL(), + Author: ADSelfPull.CreatedBy.GetUniqueName(), + HeadBranch: "feature/sourceBranch", + BaseBranch: "targetBranch", + HeadCommit: ADSelfPull.LastMergeSourceCommit.GetCommitID(), + Num: ADSelfPull.GetPullRequestID(), + State: models.OpenPullState, + BaseRepo: expBaseRepo, + }, actPull) + Equals(t, expBaseRepo, actBaseRepo) + Equals(t, expBaseRepo, actHeadRepo) +} diff --git a/server/events/git_cred_writer.go b/server/events/git_cred_writer.go index 1eea384c2d..b8e83facb9 100644 --- a/server/events/git_cred_writer.go +++ b/server/events/git_cred_writer.go @@ -2,7 +2,6 @@ package events import ( "fmt" - "io/ioutil" "os" "os/exec" "path/filepath" @@ -24,7 +23,7 @@ func WriteGitCreds(gitUser string, gitToken string, gitHostname string, home str // If the file doesn't exist, write it. if _, err := os.Stat(credsFile); err != nil { - if err := ioutil.WriteFile(credsFile, []byte(config), 0600); err != nil { + if err := os.WriteFile(credsFile, []byte(config), 0600); err != nil { return errors.Wrapf(err, "writing generated %s file with user, token and hostname to %s", credsFilename, credsFile) } logger.Info("wrote git credentials to %s", credsFile) @@ -68,7 +67,7 @@ func WriteGitCreds(gitUser string, gitToken string, gitHostname string, home str } func fileHasLine(line string, filename string) (bool, error) { - currContents, err := ioutil.ReadFile(filename) // nolint: gosec + currContents, err := os.ReadFile(filename) // nolint: gosec if err != nil { return false, errors.Wrapf(err, "reading %s", filename) } @@ -81,18 +80,18 @@ func fileHasLine(line string, filename string) (bool, error) { } func fileAppend(line string, filename string) error { - currContents, err := ioutil.ReadFile(filename) // nolint: gosec + currContents, err := os.ReadFile(filename) // nolint: gosec if err != nil { return err } if len(currContents) > 0 && !strings.HasSuffix(string(currContents), "\n") { line = "\n" + line } - return ioutil.WriteFile(filename, []byte(string(currContents)+line), 0600) + return os.WriteFile(filename, []byte(string(currContents)+line), 0600) } func fileLineReplace(line, user, host, filename string) error { - currContents, err := ioutil.ReadFile(filename) // nolint: gosec + currContents, err := os.ReadFile(filename) // nolint: gosec if err != nil { return err } @@ -112,5 +111,5 @@ func fileLineReplace(line, user, host, filename string) error { return fileAppend(line, filename) } - return ioutil.WriteFile(filename, []byte(toWrite), 0600) + return os.WriteFile(filename, []byte(toWrite), 0600) } diff --git a/server/events/git_cred_writer_test.go b/server/events/git_cred_writer_test.go index c8fc9a2cc2..9d9583abbb 100644 --- a/server/events/git_cred_writer_test.go +++ b/server/events/git_cred_writer_test.go @@ -2,7 +2,7 @@ package events_test import ( "fmt" - "io/ioutil" + "os" "os/exec" "path/filepath" "testing" @@ -24,7 +24,7 @@ func TestWriteGitCreds_WriteFile(t *testing.T) { expContents := `https://user:token@hostname` - actContents, err := 
ioutil.ReadFile(filepath.Join(tmp, ".git-credentials")) + actContents, err := os.ReadFile(filepath.Join(tmp, ".git-credentials")) Ok(t, err) Equals(t, expContents, string(actContents)) } @@ -36,14 +36,14 @@ func TestWriteGitCreds_Appends(t *testing.T) { defer cleanup() credsFile := filepath.Join(tmp, ".git-credentials") - err := ioutil.WriteFile(credsFile, []byte("contents"), 0600) + err := os.WriteFile(credsFile, []byte("contents"), 0600) Ok(t, err) err = events.WriteGitCreds("user", "token", "hostname", tmp, logger, false) Ok(t, err) expContents := "contents\nhttps://user:token@hostname" - actContents, err := ioutil.ReadFile(filepath.Join(tmp, ".git-credentials")) + actContents, err := os.ReadFile(filepath.Join(tmp, ".git-credentials")) Ok(t, err) Equals(t, expContents, string(actContents)) } @@ -56,12 +56,12 @@ func TestWriteGitCreds_NoModification(t *testing.T) { credsFile := filepath.Join(tmp, ".git-credentials") contents := "line1\nhttps://user:token@hostname\nline2" - err := ioutil.WriteFile(credsFile, []byte(contents), 0600) + err := os.WriteFile(credsFile, []byte(contents), 0600) Ok(t, err) err = events.WriteGitCreds("user", "token", "hostname", tmp, logger, false) Ok(t, err) - actContents, err := ioutil.ReadFile(filepath.Join(tmp, ".git-credentials")) + actContents, err := os.ReadFile(filepath.Join(tmp, ".git-credentials")) Ok(t, err) Equals(t, contents, string(actContents)) } @@ -73,13 +73,13 @@ func TestWriteGitCreds_ReplaceApp(t *testing.T) { credsFile := filepath.Join(tmp, ".git-credentials") contents := "line1\nhttps://x-access-token:v1.87dddddddddddddddd@github.com\nline2" - err := ioutil.WriteFile(credsFile, []byte(contents), 0600) + err := os.WriteFile(credsFile, []byte(contents), 0600) Ok(t, err) err = events.WriteGitCreds("x-access-token", "token", "github.com", tmp, logger, true) Ok(t, err) expContets := "line1\nhttps://x-access-token:token@github.com\nline2" - actContents, err := ioutil.ReadFile(filepath.Join(tmp, ".git-credentials")) + actContents, err := os.ReadFile(filepath.Join(tmp, ".git-credentials")) Ok(t, err) Equals(t, expContets, string(actContents)) } @@ -91,13 +91,13 @@ func TestWriteGitCreds_AppendApp(t *testing.T) { credsFile := filepath.Join(tmp, ".git-credentials") contents := "" - err := ioutil.WriteFile(credsFile, []byte(contents), 0600) + err := os.WriteFile(credsFile, []byte(contents), 0600) Ok(t, err) err = events.WriteGitCreds("x-access-token", "token", "github.com", tmp, logger, true) Ok(t, err) expContets := "https://x-access-token:token@github.com" - actContents, err := ioutil.ReadFile(filepath.Join(tmp, ".git-credentials")) + actContents, err := os.ReadFile(filepath.Join(tmp, ".git-credentials")) Ok(t, err) Equals(t, expContets, string(actContents)) } @@ -109,7 +109,7 @@ func TestWriteGitCreds_ErrIfCannotRead(t *testing.T) { defer cleanup() credsFile := filepath.Join(tmp, ".git-credentials") - err := ioutil.WriteFile(credsFile, []byte("can't see me!"), 0000) + err := os.WriteFile(credsFile, []byte("can't see me!"), 0000) Ok(t, err) expErr := fmt.Sprintf("open %s: permission denied", credsFile) diff --git a/server/events/markdown_renderer.go b/server/events/markdown_renderer.go index 36e7a0c680..9dfe07e3dc 100644 --- a/server/events/markdown_renderer.go +++ b/server/events/markdown_renderer.go @@ -371,7 +371,9 @@ var unwrappedErrTmplText = "**{{.Command}} Error**\n" + "{{.Error}}\n" + "```" + "{{ if eq .Command \"Policy Check\" }}" + - "\n* :heavy_check_mark: To **approve** failing policies either request an approval from approvers or address 
the failure by modifying the codebase.\n" + + "\n* :heavy_check_mark: To **approve** failing policies an authorized approver can comment:\n" + + " * `atlantis approve_policies`\n" + + "* :repeat: Or, address the policy failure by modifying the codebase and re-planning.\n" + "{{ end }}" var wrappedErrTmplText = "**{{.Command}} Error**\n" + "
Show Output\n\n" + diff --git a/server/events/markdown_renderer_test.go b/server/events/markdown_renderer_test.go index 0679a700b6..65a61d2817 100644 --- a/server/events/markdown_renderer_test.go +++ b/server/events/markdown_renderer_test.go @@ -49,7 +49,9 @@ func TestRenderErr(t *testing.T) { models.PolicyCheckCommand, err, "**Policy Check Error**\n```\nerr\n```" + - "\n* :heavy_check_mark: To **approve** failing policies either request an approval from approvers or address the failure by modifying the codebase.\n\n", + "\n* :heavy_check_mark: To **approve** failing policies an authorized approver can comment:\n" + + " * `atlantis approve_policies`\n" + + "* :repeat: Or, address the policy failure by modifying the codebase and re-planning.\n\n", }, } @@ -639,7 +641,9 @@ $$$ $$$ error $$$ -* :heavy_check_mark: To **approve** failing policies either request an approval from approvers or address the failure by modifying the codebase. +* :heavy_check_mark: To **approve** failing policies an authorized approver can comment: + * $atlantis approve_policies$ +* :repeat: Or, address the policy failure by modifying the codebase and re-planning. --- diff --git a/server/events/mocks/mock_log_stream_url_generator.go b/server/events/mocks/mock_log_stream_url_generator.go new file mode 100644 index 0000000000..2742aa8015 --- /dev/null +++ b/server/events/mocks/mock_log_stream_url_generator.go @@ -0,0 +1,109 @@ +// Code generated by pegomock. DO NOT EDIT. +// Source: github.com/runatlantis/atlantis/server/events (interfaces: JobsUrlGenerator) + +package mocks + +import ( + pegomock "github.com/petergtz/pegomock" + models "github.com/runatlantis/atlantis/server/events/models" + "reflect" + "time" +) + +type MockJobsUrlGenerator struct { + fail func(message string, callerSkip ...int) +} + +func NewMockJobsUrlGenerator(options ...pegomock.Option) *MockJobsUrlGenerator { + mock := &MockJobsUrlGenerator{} + for _, option := range options { + option.Apply(mock) + } + return mock +} + +func (mock *MockJobsUrlGenerator) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } +func (mock *MockJobsUrlGenerator) FailHandler() pegomock.FailHandler { return mock.fail } + +func (mock *MockJobsUrlGenerator) GenerateProjectJobsUrl(pull models.PullRequest, p models.ProjectCommandContext) string { + if mock == nil { + panic("mock must not be nil. 
Use myMock := NewMockJobsUrlGenerator().") + } + params := []pegomock.Param{pull, p} + result := pegomock.GetGenericMockFrom(mock).Invoke("GenerateProjectJobsUrl", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem()}) + var ret0 string + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(string) + } + } + return ret0 +} + +func (mock *MockJobsUrlGenerator) VerifyWasCalledOnce() *VerifierMockJobsUrlGenerator { + return &VerifierMockJobsUrlGenerator{ + mock: mock, + invocationCountMatcher: pegomock.Times(1), + } +} + +func (mock *MockJobsUrlGenerator) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockJobsUrlGenerator { + return &VerifierMockJobsUrlGenerator{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + } +} + +func (mock *MockJobsUrlGenerator) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockJobsUrlGenerator { + return &VerifierMockJobsUrlGenerator{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + inOrderContext: inOrderContext, + } +} + +func (mock *MockJobsUrlGenerator) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockJobsUrlGenerator { + return &VerifierMockJobsUrlGenerator{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + timeout: timeout, + } +} + +type VerifierMockJobsUrlGenerator struct { + mock *MockJobsUrlGenerator + invocationCountMatcher pegomock.InvocationCountMatcher + inOrderContext *pegomock.InOrderContext + timeout time.Duration +} + +func (verifier *VerifierMockJobsUrlGenerator) GenerateProjectJobsUrl(pull models.PullRequest, p models.ProjectCommandContext) *MockJobsUrlGenerator_GenerateProjectJobsUrl_OngoingVerification { + params := []pegomock.Param{pull, p} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GenerateProjectJobsUrl", params, verifier.timeout) + return &MockJobsUrlGenerator_GenerateProjectJobsUrl_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockJobsUrlGenerator_GenerateProjectJobsUrl_OngoingVerification struct { + mock *MockJobsUrlGenerator + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockJobsUrlGenerator_GenerateProjectJobsUrl_OngoingVerification) GetCapturedArguments() (models.PullRequest, models.ProjectCommandContext) { + pull, p := c.GetAllCapturedArguments() + return pull[len(pull)-1], p[len(p)-1] +} + +func (c *MockJobsUrlGenerator_GenerateProjectJobsUrl_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest, _param1 []models.ProjectCommandContext) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(models.PullRequest) + } + _param1 = make([]models.ProjectCommandContext, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(models.ProjectCommandContext) + } + } + return +} diff --git a/server/events/models/fixtures/fixtures.go b/server/events/models/fixtures/fixtures.go index 8aec38b4fb..420c2e7705 100644 --- a/server/events/models/fixtures/fixtures.go +++ b/server/events/models/fixtures/fixtures.go @@ -13,7 +13,12 @@ package fixtures -import "github.com/runatlantis/atlantis/server/events/models" +import ( + "fmt" + 
+ "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/events/yaml/valid" +) var Pull = models.PullRequest{ Num: 1, @@ -50,3 +55,11 @@ var GitlabRepo = models.Repo{ var User = models.User{ Username: "lkysow", } + +var projectName = "test-project" + +var Project = valid.Project{ + Name: &projectName, +} + +var PullInfo = fmt.Sprintf("%s/%d/%s", GithubRepo.FullName, Pull.Num, *Project.Name) diff --git a/server/events/models/models.go b/server/events/models/models.go index fb66ac1230..b978f6ba02 100644 --- a/server/events/models/models.go +++ b/server/events/models/models.go @@ -33,8 +33,14 @@ import ( const ( planfileSlashReplace = "::" + LogStreamingClearMsg = "\n-----Starting New Process-----" ) +type PullReqStatus struct { + ApprovalStatus ApprovalStatus + Mergeable bool +} + // Repo is a VCS repository. type Repo struct { // FullName is the owner and repo name separated @@ -142,6 +148,12 @@ func NewRepo(vcsHostType VCSHostType, repoFullName string, cloneURL string, vcsU }, nil } +type ApprovalStatus struct { + IsApproved bool + ApprovedBy string + Date time.Time +} + // PullRequest is a VCS pull request. // GitLab calls these Merge Requests. type PullRequest struct { @@ -364,8 +376,8 @@ type ProjectCommandContext struct { HeadRepo Repo // Log is a logger that's been set up for this context. Log logging.SimpleLogging - // PullMergeable is true if the pull request for this project is able to be merged. - PullMergeable bool + // PullReqStatus holds state about the PR that requires additional computation outside models.PullRequest + PullReqStatus PullReqStatus // CurrentProjectPlanStatus is the status of the current project prior to this command. ProjectPlanStatus ProjectPlanStatus // Pull is the pull request we're responding to. @@ -411,6 +423,23 @@ func (p ProjectCommandContext) GetShowResultFileName() string { return fmt.Sprintf("%s-%s.json", projName, p.Workspace) } +// Gets a unique identifier for the current pull request as a single string +func (p ProjectCommandContext) PullInfo() string { + return BuildPullInfo(p.BaseRepo.FullName, p.Pull.Num, p.ProjectName, p.RepoRelDir, p.Workspace) +} + +func BuildPullInfo(repoName string, pullNum int, projectName string, relDir string, workspace string) string { + projectIdentifier := GetProjectIdentifier(relDir, projectName) + return fmt.Sprintf("%s/%d/%s/%s", repoName, pullNum, projectIdentifier, workspace) +} + +func GetProjectIdentifier(relRepoDir string, projectName string) string { + if projectName != "" { + return projectName + } + return strings.ReplaceAll(relRepoDir, "/", "-") +} + // SplitRepoFullName splits a repo full name up into its owner and repo // name segments. If the repoFullName is malformed, may return empty // strings for owner or repo. @@ -654,6 +683,14 @@ func (c CommandName) TitleString() string { return strings.Title(strings.ReplaceAll(strings.ToLower(c.String()), "_", " ")) } +type ProjectCmdOutputLine struct { + ProjectInfo string + + Line string + + ClearBuffBefore bool +} + // String returns the string representation of c. 
func (c CommandName) String() string { switch c { diff --git a/server/events/pending_plan_finder.go b/server/events/pending_plan_finder.go index bff75b6230..38b8bbdec7 100644 --- a/server/events/pending_plan_finder.go +++ b/server/events/pending_plan_finder.go @@ -1,7 +1,6 @@ package events import ( - "io/ioutil" "os" "os/exec" "path/filepath" @@ -43,7 +42,7 @@ func (p *DefaultPendingPlanFinder) Find(pullDir string) ([]PendingPlan, error) { } func (p *DefaultPendingPlanFinder) findWithAbsPaths(pullDir string) ([]PendingPlan, []string, error) { - workspaceDirs, err := ioutil.ReadDir(pullDir) + workspaceDirs, err := os.ReadDir(pullDir) if err != nil { return nil, nil, err } diff --git a/server/events/pre_workflow_hooks_command_runner_test.go b/server/events/pre_workflow_hooks_command_runner_test.go index e384a52353..3476e8446b 100644 --- a/server/events/pre_workflow_hooks_command_runner_test.go +++ b/server/events/pre_workflow_hooks_command_runner_test.go @@ -74,7 +74,7 @@ func TestRunPreHooks_Clone(t *testing.T) { t.Run("success hooks in cfg", func(t *testing.T) { preWorkflowHooksSetup(t) - var unlockCalled *bool = newBool(false) + var unlockCalled = newBool(false) unlockFn := func() { unlockCalled = newBool(true) } @@ -159,7 +159,7 @@ func TestRunPreHooks_Clone(t *testing.T) { t.Run("error cloning", func(t *testing.T) { preWorkflowHooksSetup(t) - var unlockCalled *bool = newBool(false) + var unlockCalled = newBool(false) unlockFn := func() { unlockCalled = newBool(true) } @@ -191,7 +191,7 @@ func TestRunPreHooks_Clone(t *testing.T) { t.Run("error running pre hook", func(t *testing.T) { preWorkflowHooksSetup(t) - var unlockCalled *bool = newBool(false) + var unlockCalled = newBool(false) unlockFn := func() { unlockCalled = newBool(true) } diff --git a/server/events/project_command_builder_internal_test.go b/server/events/project_command_builder_internal_test.go index 9d8891907e..087aaf78ad 100644 --- a/server/events/project_command_builder_internal_test.go +++ b/server/events/project_command_builder_internal_test.go @@ -1,7 +1,7 @@ package events import ( - "io/ioutil" + "os" "path/filepath" "testing" @@ -66,16 +66,18 @@ workflows: AutoplanEnabled: true, HeadRepo: models.Repo{}, Log: logger, - PullMergeable: true, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, + PullReqStatus: models.PullReqStatus{ + Mergeable: true, + }, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{}, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, }, expPlanSteps: []string{"init", "plan"}, expApplySteps: []string{"apply"}, @@ -116,18 +118,20 @@ projects: AutoplanEnabled: true, HeadRepo: models.Repo{}, Log: logger, - PullMergeable: true, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, + PullReqStatus: models.PullReqStatus{ + Mergeable: true, + }, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- 
flag", + RepoRelDir: "project1", + TerraformVersion: mustVersion("10.0"), + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, }, expPlanSteps: []string{"init", "plan"}, expApplySteps: []string{"apply"}, @@ -168,18 +172,20 @@ projects: AutoplanEnabled: true, HeadRepo: models.Repo{}, Log: logger, - PullMergeable: true, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{"approved", "mergeable"}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, + PullReqStatus: models.PullReqStatus{ + Mergeable: true, + }, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{"approved", "mergeable"}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + TerraformVersion: mustVersion("10.0"), + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, }, expPlanSteps: []string{"init", "plan"}, expApplySteps: []string{"apply"}, @@ -228,18 +234,20 @@ projects: AutoplanEnabled: true, HeadRepo: models.Repo{}, Log: logger, - PullMergeable: true, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{"approved"}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, + PullReqStatus: models.PullReqStatus{ + Mergeable: true, + }, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{"approved"}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + TerraformVersion: mustVersion("10.0"), + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, }, expPlanSteps: []string{"plan"}, expApplySteps: []string{}, @@ -375,18 +383,20 @@ workflows: AutoplanEnabled: true, HeadRepo: models.Repo{}, Log: logger, - PullMergeable: true, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, + PullReqStatus: models.PullReqStatus{ + Mergeable: true, + }, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + TerraformVersion: mustVersion("10.0"), + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, }, expPlanSteps: []string{"plan"}, expApplySteps: []string{"apply"}, @@ -431,18 +441,20 @@ projects: AutoplanEnabled: true, HeadRepo: models.Repo{}, Log: logger, - PullMergeable: true, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, + PullReqStatus: models.PullReqStatus{ + Mergeable: true, + }, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{}, 
+ RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + TerraformVersion: mustVersion("10.0"), + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, }, expPlanSteps: []string{"plan"}, expApplySteps: []string{"apply"}, @@ -490,18 +502,20 @@ workflows: AutoplanEnabled: true, HeadRepo: models.Repo{}, Log: logger, - PullMergeable: true, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, + PullReqStatus: models.PullReqStatus{ + Mergeable: true, + }, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + TerraformVersion: mustVersion("10.0"), + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, }, expPlanSteps: []string{}, expApplySteps: []string{}, @@ -533,17 +547,19 @@ projects: AutoplanEnabled: true, HeadRepo: models.Repo{}, Log: logger, - PullMergeable: true, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{"approved"}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, + PullReqStatus: models.PullReqStatus{ + Mergeable: true, + }, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{"approved"}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, }, expPlanSteps: []string{"plan"}, expApplySteps: []string{"apply"}, @@ -571,7 +587,7 @@ projects: // Write and parse the global config file. 
globalCfgPath := filepath.Join(tmp, "global.yaml") - Ok(t, ioutil.WriteFile(globalCfgPath, []byte(c.globalCfg), 0600)) + Ok(t, os.WriteFile(globalCfgPath, []byte(c.globalCfg), 0600)) parser := &yaml.ParserValidator{} globalCfgArgs := valid.GlobalCfgArgs{ AllowRepoCfg: false, @@ -583,7 +599,7 @@ projects: Ok(t, err) if c.repoCfg != "" { - Ok(t, ioutil.WriteFile(filepath.Join(tmp, "atlantis.yaml"), []byte(c.repoCfg), 0600)) + Ok(t, os.WriteFile(filepath.Join(tmp, "atlantis.yaml"), []byte(c.repoCfg), 0600)) } builder := NewProjectCommandBuilder( @@ -609,7 +625,9 @@ projects: Pull: models.PullRequest{ BaseRepo: baseRepo, }, - PullMergeable: true, + PullRequestStatus: models.PullReqStatus{ + Mergeable: true, + }, }, cmd, "", []string{"flag"}, tmp, "project1", "myworkspace", true) if c.expErr != "" { @@ -725,18 +743,20 @@ projects: AutoplanEnabled: true, HeadRepo: models.Repo{}, Log: logging.NewNoopLogger(t), - PullMergeable: true, - Pull: pull, - ProjectName: "myproject_1", - ApplyRequirements: []string{}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -p myproject_1 -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, + PullReqStatus: models.PullReqStatus{ + Mergeable: true, + }, + Pull: pull, + ProjectName: "myproject_1", + ApplyRequirements: []string{}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -p myproject_1 -- flag", + RepoRelDir: "project1", + TerraformVersion: mustVersion("10.0"), + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, }, expPlanSteps: []string{"init", "plan"}, expApplySteps: []string{"apply"}, @@ -764,13 +784,13 @@ projects: // Write and parse the global config file. 
globalCfgPath := filepath.Join(tmp, "global.yaml") - Ok(t, ioutil.WriteFile(globalCfgPath, []byte(c.globalCfg), 0600)) + Ok(t, os.WriteFile(globalCfgPath, []byte(c.globalCfg), 0600)) parser := &yaml.ParserValidator{} globalCfg, err := parser.ParseGlobalCfg(globalCfgPath, valid.NewGlobalCfg(false, false, false)) Ok(t, err) if c.repoCfg != "" { - Ok(t, ioutil.WriteFile(filepath.Join(tmp, "atlantis.yaml"), []byte(c.repoCfg), 0600)) + Ok(t, os.WriteFile(filepath.Join(tmp, "atlantis.yaml"), []byte(c.repoCfg), 0600)) } builder := NewProjectCommandBuilder( @@ -795,8 +815,10 @@ projects: Pull: models.PullRequest{ BaseRepo: baseRepo, }, - Log: logging.NewNoopLogger(t), - PullMergeable: true, + Log: logging.NewNoopLogger(t), + PullRequestStatus: models.PullReqStatus{ + Mergeable: true, + }, }, cmd, "myproject_[1-2]", []string{"flag"}, tmp, "project1", "myworkspace", true) if c.expErr != "" { @@ -877,16 +899,18 @@ repos: AutoplanEnabled: true, HeadRepo: models.Repo{}, Log: logger, - PullMergeable: true, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, + PullReqStatus: models.PullReqStatus{ + Mergeable: true, + }, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{}, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, }, expPolicyCheckSteps: []string{"show", "policy_check"}, }, @@ -932,18 +956,20 @@ workflows: AutoplanEnabled: true, HeadRepo: models.Repo{}, Log: logger, - PullMergeable: true, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Verbose: true, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, + PullReqStatus: models.PullReqStatus{ + Mergeable: true, + }, + Pull: pull, + ProjectName: "", + ApplyRequirements: []string{}, + RepoConfigVersion: 3, + RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", + RepoRelDir: "project1", + TerraformVersion: mustVersion("10.0"), + User: models.User{}, + Verbose: true, + Workspace: "myworkspace", + PolicySets: emptyPolicySets, }, expPolicyCheckSteps: []string{"policy_check"}, }, @@ -970,7 +996,7 @@ workflows: // Write and parse the global config file. 
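// Illustrative sketch, not part of this change: the io/ioutil calls removed across
// these hunks map onto their Go 1.16+ equivalents in the os package. File and
// directory names below are placeholders; assumes only the "os" import.
func ioutilToOsSketch() error {
	data, err := os.ReadFile("atlantis.yaml") // was ioutil.ReadFile
	if err != nil {
		return err
	}
	if err := os.WriteFile("atlantis-copy.yaml", data, 0600); err != nil { // was ioutil.WriteFile
		return err
	}
	tmp, err := os.MkdirTemp("", "atlantis-test") // was ioutil.TempDir
	if err != nil {
		return err
	}
	defer os.RemoveAll(tmp)
	_, err = os.ReadDir(tmp) // was ioutil.ReadDir; note it returns []os.DirEntry, not []os.FileInfo
	return err
}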
globalCfgPath := filepath.Join(tmp, "global.yaml") - Ok(t, ioutil.WriteFile(globalCfgPath, []byte(c.globalCfg), 0600)) + Ok(t, os.WriteFile(globalCfgPath, []byte(c.globalCfg), 0600)) parser := &yaml.ParserValidator{} globalCfgArgs := valid.GlobalCfgArgs{ AllowRepoCfg: false, @@ -983,7 +1009,7 @@ workflows: Ok(t, err) if c.repoCfg != "" { - Ok(t, ioutil.WriteFile(filepath.Join(tmp, "atlantis.yaml"), []byte(c.repoCfg), 0600)) + Ok(t, os.WriteFile(filepath.Join(tmp, "atlantis.yaml"), []byte(c.repoCfg), 0600)) } builder := NewProjectCommandBuilder( @@ -1008,7 +1034,9 @@ workflows: Pull: models.PullRequest{ BaseRepo: baseRepo, }, - PullMergeable: true, + PullRequestStatus: models.PullReqStatus{ + Mergeable: true, + }, }, models.PlanCommand, "", []string{"flag"}, tmp, "project1", "myworkspace", true) if c.expErr != "" { diff --git a/server/events/project_command_builder_test.go b/server/events/project_command_builder_test.go index 14b43c3621..8545164d94 100644 --- a/server/events/project_command_builder_test.go +++ b/server/events/project_command_builder_test.go @@ -2,7 +2,7 @@ package events_test import ( "fmt" - "io/ioutil" + "os" "path/filepath" "strings" "testing" @@ -132,7 +132,7 @@ projects: vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"main.tf"}, nil) if c.AtlantisYAML != "" { - err := ioutil.WriteFile(filepath.Join(tmpDir, yaml.AtlantisYAMLFilename), []byte(c.AtlantisYAML), 0600) + err := os.WriteFile(filepath.Join(tmpDir, yaml.AtlantisYAMLFilename), []byte(c.AtlantisYAML), 0600) Ok(t, err) } @@ -159,8 +159,10 @@ projects: ) ctxs, err := builder.BuildAutoplanCommands(&events.CommandContext{ - PullMergeable: true, - Log: logger, + PullRequestStatus: models.PullReqStatus{ + Mergeable: true, + }, + Log: logger, }) Ok(t, err) Equals(t, len(c.exp), len(ctxs)) @@ -394,7 +396,7 @@ projects: vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"main.tf"}, nil) if c.AtlantisYAML != "" { - err := ioutil.WriteFile(filepath.Join(tmpDir, yaml.AtlantisYAMLFilename), []byte(c.AtlantisYAML), 0600) + err := os.WriteFile(filepath.Join(tmpDir, yaml.AtlantisYAMLFilename), []byte(c.AtlantisYAML), 0600) Ok(t, err) } @@ -545,7 +547,7 @@ projects: vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn(c.ModifiedFiles, nil) if c.AtlantisYAML != "" { - err := ioutil.WriteFile(filepath.Join(tmpDir, yaml.AtlantisYAMLFilename), []byte(c.AtlantisYAML), 0600) + err := os.WriteFile(filepath.Join(tmpDir, yaml.AtlantisYAMLFilename), []byte(c.AtlantisYAML), 0600) Ok(t, err) } @@ -703,7 +705,7 @@ projects: - dir: . 
workspace: staging ` - err := ioutil.WriteFile(filepath.Join(repoDir, yaml.AtlantisYAMLFilename), []byte(yamlCfg), 0600) + err := os.WriteFile(filepath.Join(repoDir, yaml.AtlantisYAMLFilename), []byte(yamlCfg), 0600) Ok(t, err) When(workingDir.Clone( @@ -1058,11 +1060,13 @@ projects: var actCtxs []models.ProjectCommandContext var err error actCtxs, err = builder.BuildAutoplanCommands(&events.CommandContext{ - HeadRepo: models.Repo{}, - Pull: models.PullRequest{}, - User: models.User{}, - Log: logger, - PullMergeable: true, + HeadRepo: models.Repo{}, + Pull: models.PullRequest{}, + User: models.User{}, + Log: logger, + PullRequestStatus: models.PullReqStatus{ + Mergeable: true, + }, }) Ok(t, err) Equals(t, 0, len(actCtxs)) @@ -1108,8 +1112,10 @@ func TestDefaultProjectCommandBuilder_WithPolicyCheckEnabled_BuildAutoplanComman ) ctxs, err := builder.BuildAutoplanCommands(&events.CommandContext{ - PullMergeable: true, - Log: logger, + PullRequestStatus: models.PullReqStatus{ + Mergeable: true, + }, + Log: logger, }) Ok(t, err) diff --git a/server/events/project_command_context_builder.go b/server/events/project_command_context_builder.go index da5a0810f3..7f72ec755c 100644 --- a/server/events/project_command_context_builder.go +++ b/server/events/project_command_context_builder.go @@ -70,7 +70,7 @@ func (cb *DefaultProjectCommandContextBuilder) BuildProjectContext( prjCfg.TerraformVersion = getTfVersion(ctx, filepath.Join(repoDir, prjCfg.RepoRelDir)) } - projectCmds = append(projectCmds, newProjectCommandContext( + projectCmdContext := newProjectCommandContext( ctx, cmdName, cb.CommentBuilder.BuildApplyComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name, prjCfg.AutoMergeDisabled), @@ -85,7 +85,10 @@ func (cb *DefaultProjectCommandContextBuilder) BuildProjectContext( parallelApply, parallelPlan, verbose, - )) + ctx.PullRequestStatus, + ) + + projectCmds = append(projectCmds, projectCmdContext) return } @@ -143,6 +146,7 @@ func (cb *PolicyCheckProjectCommandContextBuilder) BuildProjectContext( parallelApply, parallelPlan, verbose, + ctx.PullRequestStatus, )) } @@ -165,6 +169,7 @@ func newProjectCommandContext(ctx *CommandContext, parallelApplyEnabled bool, parallelPlanEnabled bool, verbose bool, + pullStatus models.PullReqStatus, ) models.ProjectCommandContext { var projectPlanStatus models.ProjectPlanStatus @@ -186,31 +191,32 @@ func newProjectCommandContext(ctx *CommandContext, } return models.ProjectCommandContext{ - CommandName: cmd, - ApplyCmd: applyCmd, - BaseRepo: ctx.Pull.BaseRepo, - EscapedCommentArgs: escapedCommentArgs, - AutomergeEnabled: automergeEnabled, - DeleteSourceBranchOnMerge: deleteSourceBranchOnMerge, - ParallelApplyEnabled: parallelApplyEnabled, - ParallelPlanEnabled: parallelPlanEnabled, - AutoplanEnabled: projCfg.AutoplanEnabled, - Steps: steps, - HeadRepo: ctx.HeadRepo, - Log: ctx.Log, - PullMergeable: ctx.PullMergeable, - ProjectPlanStatus: projectPlanStatus, - Pull: ctx.Pull, - ProjectName: projCfg.Name, - ApplyRequirements: projCfg.ApplyRequirements, - RePlanCmd: planCmd, - RepoRelDir: projCfg.RepoRelDir, - RepoConfigVersion: projCfg.RepoCfgVersion, - TerraformVersion: projCfg.TerraformVersion, - User: ctx.User, - Verbose: verbose, - Workspace: projCfg.Workspace, - PolicySets: policySets, + CommandName: cmd, + ApplyCmd: applyCmd, + BaseRepo: ctx.Pull.BaseRepo, + EscapedCommentArgs: escapedCommentArgs, + AutomergeEnabled: automergeEnabled, + DeleteSourceBranchOnMerge: deleteSourceBranchOnMerge, + ParallelApplyEnabled: parallelApplyEnabled, + ParallelPlanEnabled: 
parallelPlanEnabled, + ParallelPolicyCheckEnabled: parallelPlanEnabled, + AutoplanEnabled: projCfg.AutoplanEnabled, + Steps: steps, + HeadRepo: ctx.HeadRepo, + Log: ctx.Log, + ProjectPlanStatus: projectPlanStatus, + Pull: ctx.Pull, + ProjectName: projCfg.Name, + ApplyRequirements: projCfg.ApplyRequirements, + RePlanCmd: planCmd, + RepoRelDir: projCfg.RepoRelDir, + RepoConfigVersion: projCfg.RepoCfgVersion, + TerraformVersion: projCfg.TerraformVersion, + User: ctx.User, + Verbose: verbose, + Workspace: projCfg.Workspace, + PolicySets: policySets, + PullReqStatus: pullStatus, } } diff --git a/server/events/project_command_runner.go b/server/events/project_command_runner.go index 6f9eacc4d5..c648c73b7e 100644 --- a/server/events/project_command_runner.go +++ b/server/events/project_command_runner.go @@ -20,9 +20,11 @@ import ( "strings" "github.com/pkg/errors" + "github.com/runatlantis/atlantis/server/core/runtime" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/webhooks" "github.com/runatlantis/atlantis/server/events/yaml/valid" + "github.com/runatlantis/atlantis/server/handlers" "github.com/runatlantis/atlantis/server/logging" ) @@ -113,6 +115,50 @@ type ProjectCommandRunner interface { ProjectVersionCommandRunner } +// ProjectOutputWrapper is a decorator that creates a new PR status check per project. +// The status contains a url that outputs current progress of the terraform plan/apply command. +type ProjectOutputWrapper struct { + ProjectCommandRunner + ProjectCmdOutputHandler handlers.ProjectCommandOutputHandler +} + +func (p *ProjectOutputWrapper) Plan(ctx models.ProjectCommandContext) models.ProjectResult { + // Reset the buffer when running the plan. We only need to do this for plan, + // apply is a continuation of the same workflow + p.ProjectCmdOutputHandler.Clear(ctx) + return p.updateProjectPRStatus(models.PlanCommand, ctx, p.ProjectCommandRunner.Plan) +} + +func (p *ProjectOutputWrapper) Apply(ctx models.ProjectCommandContext) models.ProjectResult { + return p.updateProjectPRStatus(models.ApplyCommand, ctx, p.ProjectCommandRunner.Apply) +} + +func (p *ProjectOutputWrapper) updateProjectPRStatus(commandName models.CommandName, ctx models.ProjectCommandContext, execute func(ctx models.ProjectCommandContext) models.ProjectResult) models.ProjectResult { + // Create a PR status to track project's plan status. The status will + // include a link to view the progress of atlantis plan command in real + // time + if err := p.ProjectCmdOutputHandler.SetJobURLWithStatus(ctx, commandName, models.PendingCommitStatus); err != nil { + ctx.Log.Err("updating project PR status", err) + } + + // ensures we are differentiating between project level command and overall command + result := execute(ctx) + + if result.Error != nil || result.Failure != "" { + if err := p.ProjectCmdOutputHandler.SetJobURLWithStatus(ctx, commandName, models.FailedCommitStatus); err != nil { + ctx.Log.Err("updating project PR status", err) + } + + return result + } + + if err := p.ProjectCmdOutputHandler.SetJobURLWithStatus(ctx, commandName, models.SuccessCommitStatus); err != nil { + ctx.Log.Err("updating project PR status", err) + } + + return result +} + // DefaultProjectCommandRunner implements ProjectCommandRunner. 
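// Illustrative sketch, not part of this change: ProjectOutputWrapper above is a
// decorator, so it can be layered over any ProjectCommandRunner. Both arguments
// are assumed to be constructed during server setup; assumes the
// ".../server/events" and ".../server/handlers" imports.
func wrapWithOutputStatuses(base events.ProjectCommandRunner, h handlers.ProjectCommandOutputHandler) events.ProjectCommandRunner {
	// Plan clears the project's output buffer, sets a pending commit status with
	// a job URL, delegates to the wrapped runner, then marks the status success
	// or failed; Apply does the same minus the buffer reset.
	return &events.ProjectOutputWrapper{
		ProjectCommandRunner:    base,
		ProjectCmdOutputHandler: h,
	}
}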
type DefaultProjectCommandRunner struct { Locker ProjectLocker @@ -125,6 +171,7 @@ type DefaultProjectCommandRunner struct { VersionStepRunner StepRunner RunStepRunner CustomStepRunner EnvStepRunner EnvStepRunner + PullApprovedChecker runtime.PullApprovedChecker WorkingDir WorkingDir Webhooks WebhooksSender WorkingDirLocker WorkingDirLocker @@ -312,6 +359,7 @@ func (p *DefaultProjectCommandRunner) doPlan(ctx models.ProjectCommandContext) ( } outputs, err := p.runSteps(ctx.Steps, ctx, projAbsPath) + if err != nil { if unlockErr := lockAttempt.UnlockFn(); unlockErr != nil { ctx.Log.Err("error unlocking state after plan error: %v", unlockErr) @@ -354,6 +402,7 @@ func (p *DefaultProjectCommandRunner) doApply(ctx models.ProjectCommandContext) defer unlockFn() outputs, err := p.runSteps(ctx.Steps, ctx, absPath) + p.Webhooks.Send(ctx.Log, webhooks.ApplyResult{ // nolint: errcheck Workspace: ctx.Workspace, User: ctx.User, @@ -362,9 +411,11 @@ func (p *DefaultProjectCommandRunner) doApply(ctx models.ProjectCommandContext) Success: err == nil, Directory: ctx.RepoRelDir, }) + if err != nil { return "", "", fmt.Errorf("%s\n%s", err, strings.Join(outputs, "\n")) } + return strings.Join(outputs, "\n"), "", nil } @@ -398,6 +449,7 @@ func (p *DefaultProjectCommandRunner) doVersion(ctx models.ProjectCommandContext func (p *DefaultProjectCommandRunner) runSteps(steps []valid.Step, ctx models.ProjectCommandContext, absPath string) ([]string, error) { var outputs []string + envs := make(map[string]string) for _, step := range steps { var out string diff --git a/server/events/project_command_runner_test.go b/server/events/project_command_runner_test.go index de255c4827..5c63eb877d 100644 --- a/server/events/project_command_runner_test.go +++ b/server/events/project_command_runner_test.go @@ -14,19 +14,21 @@ package events_test import ( + "errors" + "fmt" "os" "testing" "github.com/hashicorp/go-version" . "github.com/petergtz/pegomock" "github.com/runatlantis/atlantis/server/core/runtime" - mocks2 "github.com/runatlantis/atlantis/server/core/runtime/mocks" tmocks "github.com/runatlantis/atlantis/server/core/terraform/mocks" "github.com/runatlantis/atlantis/server/events" "github.com/runatlantis/atlantis/server/events/mocks" "github.com/runatlantis/atlantis/server/events/mocks/matchers" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/yaml/valid" + handlermocks "github.com/runatlantis/atlantis/server/handlers/mocks" "github.com/runatlantis/atlantis/server/logging" . 
"github.com/runatlantis/atlantis/testing" ) @@ -51,6 +53,7 @@ func TestDefaultProjectCommandRunner_Plan(t *testing.T) { ApplyStepRunner: mockApply, RunStepRunner: mockRun, EnvStepRunner: &realEnv, + PullApprovedChecker: nil, WorkingDir: mockWorkingDir, Webhooks: nil, WorkingDirLocker: events.NewDefaultWorkingDirLocker(), @@ -113,7 +116,6 @@ func TestDefaultProjectCommandRunner_Plan(t *testing.T) { Assert(t, res.PlanSuccess != nil, "exp plan success") Equals(t, "https://lock-key", res.PlanSuccess.LockURL) Equals(t, "run\napply\nplan\ninit", res.PlanSuccess.TerraformOutput) - expSteps := []string{"run", "apply", "plan", "init", "env"} for _, step := range expSteps { switch step { @@ -129,6 +131,112 @@ func TestDefaultProjectCommandRunner_Plan(t *testing.T) { } } +func TestProjectOutputWrapper(t *testing.T) { + RegisterMockTestingT(t) + ctx := models.ProjectCommandContext{ + Log: logging.NewNoopLogger(t), + Steps: []valid.Step{ + { + StepName: "plan", + }, + }, + Workspace: "default", + RepoRelDir: ".", + } + + cases := []struct { + Description string + Failure bool + Error bool + Success bool + CommandName models.CommandName + }{ + { + Description: "plan success", + Success: true, + CommandName: models.PlanCommand, + }, + { + Description: "plan failure", + Failure: true, + CommandName: models.PlanCommand, + }, + { + Description: "plan error", + Error: true, + CommandName: models.PlanCommand, + }, + { + Description: "apply success", + Success: true, + CommandName: models.ApplyCommand, + }, + { + Description: "apply failure", + Failure: true, + CommandName: models.ApplyCommand, + }, + { + Description: "apply error", + Error: true, + CommandName: models.ApplyCommand, + }, + } + + for _, c := range cases { + t.Run(c.Description, func(t *testing.T) { + var prjResult models.ProjectResult + var expCommitStatus models.CommitStatus + + mockProjectCommandOutputHandler := handlermocks.NewMockProjectCommandOutputHandler() + mockProjectCommandRunner := mocks.NewMockProjectCommandRunner() + + runner := &events.ProjectOutputWrapper{ + ProjectCmdOutputHandler: mockProjectCommandOutputHandler, + ProjectCommandRunner: mockProjectCommandRunner, + } + + if c.Success { + prjResult = models.ProjectResult{ + PlanSuccess: &models.PlanSuccess{}, + ApplySuccess: "exists", + } + expCommitStatus = models.SuccessCommitStatus + } else if c.Failure { + prjResult = models.ProjectResult{ + Failure: "failure", + } + expCommitStatus = models.FailedCommitStatus + } else if c.Error { + prjResult = models.ProjectResult{ + Error: errors.New("error"), + } + expCommitStatus = models.FailedCommitStatus + } + + When(mockProjectCommandRunner.Plan(matchers.AnyModelsProjectCommandContext())).ThenReturn(prjResult) + When(mockProjectCommandRunner.Apply(matchers.AnyModelsProjectCommandContext())).ThenReturn(prjResult) + + switch c.CommandName { + case models.PlanCommand: + runner.Plan(ctx) + case models.ApplyCommand: + runner.Apply(ctx) + } + + mockProjectCommandOutputHandler.VerifyWasCalled(Once()).SetJobURLWithStatus(ctx, c.CommandName, models.PendingCommitStatus) + mockProjectCommandOutputHandler.VerifyWasCalled(Once()).SetJobURLWithStatus(ctx, c.CommandName, expCommitStatus) + + switch c.CommandName { + case models.PlanCommand: + mockProjectCommandRunner.VerifyWasCalledOnce().Plan(ctx) + case models.ApplyCommand: + mockProjectCommandRunner.VerifyWasCalledOnce().Apply(ctx) + } + }) + } +} + // Test what happens if there's no working dir. This signals that the project // was never planned. 
func TestDefaultProjectCommandRunner_ApplyNotCloned(t *testing.T) { @@ -147,13 +255,11 @@ func TestDefaultProjectCommandRunner_ApplyNotCloned(t *testing.T) { func TestDefaultProjectCommandRunner_ApplyNotApproved(t *testing.T) { RegisterMockTestingT(t) mockWorkingDir := mocks.NewMockWorkingDir() - mockApproved := mocks2.NewMockPullApprovedChecker() runner := &events.DefaultProjectCommandRunner{ WorkingDir: mockWorkingDir, WorkingDirLocker: events.NewDefaultWorkingDirLocker(), AggregateApplyRequirements: &events.AggregateApplyRequirements{ - PullApprovedChecker: mockApproved, - WorkingDir: mockWorkingDir, + WorkingDir: mockWorkingDir, }, } ctx := models.ProjectCommandContext{ @@ -162,7 +268,6 @@ func TestDefaultProjectCommandRunner_ApplyNotApproved(t *testing.T) { tmp, cleanup := TempDir(t) defer cleanup() When(mockWorkingDir.GetWorkingDir(ctx.BaseRepo, ctx.Pull, ctx.Workspace)).ThenReturn(tmp, nil) - When(mockApproved.PullIsApproved(ctx.BaseRepo, ctx.Pull)).ThenReturn(false, nil) res := runner.Apply(ctx) Equals(t, "Pull request must be approved by at least one person other than the author before running apply.", res.Failure) @@ -180,7 +285,9 @@ func TestDefaultProjectCommandRunner_ApplyNotMergeable(t *testing.T) { }, } ctx := models.ProjectCommandContext{ - PullMergeable: false, + PullReqStatus: models.PullReqStatus{ + Mergeable: false, + }, ApplyRequirements: []string{"mergeable"}, } tmp, cleanup := TempDir(t) @@ -300,13 +407,11 @@ func TestDefaultProjectCommandRunner_Apply(t *testing.T) { mockApply := mocks.NewMockStepRunner() mockRun := mocks.NewMockCustomStepRunner() mockEnv := mocks.NewMockEnvStepRunner() - mockApproved := mocks2.NewMockPullApprovedChecker() mockWorkingDir := mocks.NewMockWorkingDir() mockLocker := mocks.NewMockProjectLocker() mockSender := mocks.NewMockWebhooksSender() applyReqHandler := &events.AggregateApplyRequirements{ - PullApprovedChecker: mockApproved, - WorkingDir: mockWorkingDir, + WorkingDir: mockWorkingDir, } runner := events.DefaultProjectCommandRunner{ @@ -336,7 +441,12 @@ func TestDefaultProjectCommandRunner_Apply(t *testing.T) { Workspace: "default", ApplyRequirements: c.applyReqs, RepoRelDir: ".", - PullMergeable: c.pullMergeable, + PullReqStatus: models.PullReqStatus{ + ApprovalStatus: models.ApprovalStatus{ + IsApproved: true, + }, + Mergeable: true, + }, } expEnvs := map[string]string{ "key": "value", @@ -346,7 +456,6 @@ func TestDefaultProjectCommandRunner_Apply(t *testing.T) { When(mockApply.Run(ctx, nil, repoDir, expEnvs)).ThenReturn("apply", nil) When(mockRun.Run(ctx, "", repoDir, expEnvs)).ThenReturn("run", nil) When(mockEnv.Run(ctx, "", "value", repoDir, make(map[string]string))).ThenReturn("value", nil) - When(mockApproved.PullIsApproved(ctx.BaseRepo, ctx.Pull)).ThenReturn(true, nil) res := runner.Apply(ctx) Equals(t, c.expOut, res.ApplySuccess) @@ -354,8 +463,6 @@ func TestDefaultProjectCommandRunner_Apply(t *testing.T) { for _, step := range c.expSteps { switch step { - case "approved": - mockApproved.VerifyWasCalledOnce().PullIsApproved(ctx.BaseRepo, ctx.Pull) case "init": mockInit.VerifyWasCalledOnce().Run(ctx, nil, repoDir, expEnvs) case "plan": @@ -372,6 +479,54 @@ func TestDefaultProjectCommandRunner_Apply(t *testing.T) { } } +// Test that it runs the expected apply steps. 
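// Illustrative sketch, not part of this change: with approval and mergeability
// precomputed into ctx.PullReqStatus (rather than calling a PullApprovedChecker
// during apply, as the removed mocks above did), an apply-requirement check can
// be a pure function of the context. This is a simplified stand-in, not the real
// AggregateApplyRequirements logic, and the "mergeable" failure message is
// invented for the example; assumes the ".../server/events/models" import.
func applyRequirementFailure(ctx models.ProjectCommandContext) string {
	for _, req := range ctx.ApplyRequirements {
		switch req {
		case "approved":
			if !ctx.PullReqStatus.ApprovalStatus.IsApproved {
				return "Pull request must be approved by at least one person other than the author before running apply."
			}
		case "mergeable":
			if !ctx.PullReqStatus.Mergeable {
				return "Pull request must be mergeable before running apply."
			}
		}
	}
	return "" // empty string means all requirements passed
}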
+func TestDefaultProjectCommandRunner_ApplyRunStepFailure(t *testing.T) { + RegisterMockTestingT(t) + mockApply := mocks.NewMockStepRunner() + mockWorkingDir := mocks.NewMockWorkingDir() + mockLocker := mocks.NewMockProjectLocker() + mockSender := mocks.NewMockWebhooksSender() + applyReqHandler := &events.AggregateApplyRequirements{ + WorkingDir: mockWorkingDir, + } + + runner := events.DefaultProjectCommandRunner{ + Locker: mockLocker, + LockURLGenerator: mockURLGenerator{}, + ApplyStepRunner: mockApply, + WorkingDir: mockWorkingDir, + WorkingDirLocker: events.NewDefaultWorkingDirLocker(), + AggregateApplyRequirements: applyReqHandler, + Webhooks: mockSender, + } + repoDir, cleanup := TempDir(t) + defer cleanup() + When(mockWorkingDir.GetWorkingDir( + matchers.AnyModelsRepo(), + matchers.AnyModelsPullRequest(), + AnyString(), + )).ThenReturn(repoDir, nil) + + ctx := models.ProjectCommandContext{ + Log: logging.NewNoopLogger(t), + Steps: []valid.Step{ + { + StepName: "apply", + }, + }, + Workspace: "default", + ApplyRequirements: []string{}, + RepoRelDir: ".", + } + expEnvs := map[string]string{} + When(mockApply.Run(ctx, nil, repoDir, expEnvs)).ThenReturn("apply", fmt.Errorf("something went wrong")) + + res := runner.Apply(ctx) + Assert(t, res.ApplySuccess == "", "exp apply failure") + + mockApply.VerifyWasCalledOnce().Run(ctx, nil, repoDir, expEnvs) +} + // Test run and env steps. We don't use mocks for this test since we're // not running any Terraform. func TestDefaultProjectCommandRunner_RunEnvSteps(t *testing.T) { diff --git a/server/events/project_finder_test.go b/server/events/project_finder_test.go index b4e523ae3c..8135a5e931 100644 --- a/server/events/project_finder_test.go +++ b/server/events/project_finder_test.go @@ -14,7 +14,6 @@ package events_test import ( - "io/ioutil" "os" "path/filepath" "testing" @@ -46,7 +45,7 @@ func setupTmpRepos(t *testing.T) { // modules/ // main.tf var err error - nestedModules1, err = ioutil.TempDir("", "") + nestedModules1, err = os.MkdirTemp("", "") Ok(t, err) err = os.MkdirAll(filepath.Join(nestedModules1, "project1/modules"), 0700) Ok(t, err) @@ -78,7 +77,7 @@ func setupTmpRepos(t *testing.T) { // main.tf // project2/ // main.tf - topLevelModules, err = ioutil.TempDir("", "") + topLevelModules, err = os.MkdirTemp("", "") Ok(t, err) for _, path := range []string{"modules", "project1", "project2"} { err = os.MkdirAll(filepath.Join(topLevelModules, path), 0700) @@ -93,7 +92,7 @@ func setupTmpRepos(t *testing.T) { // staging.tfvars // production.tfvars // global-env-config.auto.tfvars.json - envDir, err = ioutil.TempDir("", "") + envDir, err = os.MkdirTemp("", "") Ok(t, err) err = os.MkdirAll(filepath.Join(envDir, "env"), 0700) Ok(t, err) diff --git a/server/events/pull_closed_executor.go b/server/events/pull_closed_executor.go index 2ecaebe042..e20fb4b50d 100644 --- a/server/events/pull_closed_executor.go +++ b/server/events/pull_closed_executor.go @@ -16,6 +16,7 @@ package events import ( "bytes" "fmt" + "io" "sort" "strings" "text/template" @@ -28,6 +29,7 @@ import ( "github.com/runatlantis/atlantis/server/core/locking" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/vcs" + "github.com/runatlantis/atlantis/server/handlers" ) //go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_pull_cleaner.go PullCleaner @@ -42,11 +44,13 @@ type PullCleaner interface { // PullClosedExecutor executes the tasks required to clean up a closed pull // request. 
type PullClosedExecutor struct { - Locker locking.Locker - VCSClient vcs.Client - WorkingDir WorkingDir - Logger logging.SimpleLogging - DB *db.BoltDB + Locker locking.Locker + VCSClient vcs.Client + WorkingDir WorkingDir + Logger logging.SimpleLogging + DB *db.BoltDB + PullClosedTemplate PullCleanupTemplate + LogStreamResourceCleaner handlers.ResourceCleaner } type templatedProject struct { @@ -59,8 +63,31 @@ var pullClosedTemplate = template.Must(template.New("").Parse( "{{ range . }}\n" + "- dir: `{{ .RepoRelDir }}` {{ .Workspaces }}{{ end }}")) +type PullCleanupTemplate interface { + Execute(wr io.Writer, data interface{}) error +} + +type PullClosedEventTemplate struct{} + +func (t *PullClosedEventTemplate) Execute(wr io.Writer, data interface{}) error { + return pullClosedTemplate.Execute(wr, data) +} + // CleanUpPull cleans up after a closed pull request. func (p *PullClosedExecutor) CleanUpPull(repo models.Repo, pull models.PullRequest) error { + pullStatus, err := p.DB.GetPullStatus(pull) + if err != nil { + // Log and continue to clean up other resources. + p.Logger.Err("retrieving pull status: %s", err) + } + + if pullStatus != nil { + for _, project := range pullStatus.Projects { + projectKey := models.BuildPullInfo(pullStatus.Pull.BaseRepo.FullName, pull.Num, project.ProjectName, project.RepoRelDir, project.Workspace) + p.LogStreamResourceCleaner.CleanUp(projectKey) + } + } + if err := p.WorkingDir.Delete(repo, pull); err != nil { return errors.Wrap(err, "cleaning workspace") } diff --git a/server/events/pull_closed_executor_test.go b/server/events/pull_closed_executor_test.go index e5d5953bbf..c1d1f8f9bb 100644 --- a/server/events/pull_closed_executor_test.go +++ b/server/events/pull_closed_executor_test.go @@ -14,10 +14,14 @@ package events_test import ( - "errors" + "io/ioutil" "testing" + "github.com/pkg/errors" "github.com/runatlantis/atlantis/server/core/db" + "github.com/runatlantis/atlantis/server/handlers" + "github.com/stretchr/testify/assert" + bolt "go.etcd.io/bbolt" . "github.com/petergtz/pegomock" lockmocks "github.com/runatlantis/atlantis/server/core/locking/mocks" @@ -27,6 +31,8 @@ import ( "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/models/fixtures" vcsmocks "github.com/runatlantis/atlantis/server/events/vcs/mocks" + handlermocks "github.com/runatlantis/atlantis/server/handlers/mocks" + loggermocks "github.com/runatlantis/atlantis/server/logging/mocks" . 
"github.com/runatlantis/atlantis/testing" ) @@ -34,10 +40,16 @@ func TestCleanUpPullWorkspaceErr(t *testing.T) { t.Log("when workspace.Delete returns an error, we return it") RegisterMockTestingT(t) w := mocks.NewMockWorkingDir() + tmp, cleanup := TempDir(t) + defer cleanup() + db, err := db.New(tmp) + Ok(t, err) pce := events.PullClosedExecutor{ - WorkingDir: w, + WorkingDir: w, + PullClosedTemplate: &events.PullClosedEventTemplate{}, + DB: db, } - err := errors.New("err") + err = errors.New("err") When(w.Delete(fixtures.GithubRepo, fixtures.Pull)).ThenReturn(err) actualErr := pce.CleanUpPull(fixtures.GithubRepo, fixtures.Pull) Equals(t, "cleaning workspace: err", actualErr.Error()) @@ -48,11 +60,17 @@ func TestCleanUpPullUnlockErr(t *testing.T) { RegisterMockTestingT(t) w := mocks.NewMockWorkingDir() l := lockmocks.NewMockLocker() + tmp, cleanup := TempDir(t) + defer cleanup() + db, err := db.New(tmp) + Ok(t, err) pce := events.PullClosedExecutor{ - Locker: l, - WorkingDir: w, + Locker: l, + WorkingDir: w, + DB: db, + PullClosedTemplate: &events.PullClosedEventTemplate{}, } - err := errors.New("err") + err = errors.New("err") When(l.UnlockByPull(fixtures.GithubRepo.FullName, fixtures.Pull.Num)).ThenReturn(nil, err) actualErr := pce.CleanUpPull(fixtures.GithubRepo, fixtures.Pull) Equals(t, "cleaning up locks: err", actualErr.Error()) @@ -171,3 +189,100 @@ func TestCleanUpPullComments(t *testing.T) { }() } } + +func TestCleanUpLogStreaming(t *testing.T) { + RegisterMockTestingT(t) + + t.Run("Should Clean Up Log Streaming Resources When PR is closed", func(t *testing.T) { + prjStatusUpdater := handlermocks.NewMockProjectStatusUpdater() + prjJobURLGenerator := handlermocks.NewMockProjectJobURLGenerator() + + // Create Log streaming resources + prjCmdOutput := make(chan *models.ProjectCmdOutputLine) + prjCmdOutHandler := handlers.NewAsyncProjectCommandOutputHandler(prjCmdOutput, prjStatusUpdater, prjJobURLGenerator, logger) + ctx := models.ProjectCommandContext{ + BaseRepo: fixtures.GithubRepo, + Pull: fixtures.Pull, + ProjectName: *fixtures.Project.Name, + Workspace: "default", + } + + go prjCmdOutHandler.Handle() + prjCmdOutHandler.Send(ctx, "Test Message") + + // Create boltdb and add pull request. + var lockBucket = "bucket" + var configBucket = "configBucket" + var pullsBucketName = "pulls" + + f, err := ioutil.TempFile("", "") + if err != nil { + panic(errors.Wrap(err, "failed to create temp file")) + } + path := f.Name() + f.Close() // nolint: errcheck + + // Open the database. 
+ boltDB, err := bolt.Open(path, 0600, nil) + if err != nil { + panic(errors.Wrap(err, "could not start bolt DB")) + } + if err := boltDB.Update(func(tx *bolt.Tx) error { + if _, err := tx.CreateBucketIfNotExists([]byte(pullsBucketName)); err != nil { + return errors.Wrap(err, "failed to create bucket") + } + return nil + }); err != nil { + panic(errors.Wrap(err, "could not create bucket")) + } + db, _ := db.NewWithDB(boltDB, lockBucket, configBucket) + result := []models.ProjectResult{ + { + RepoRelDir: fixtures.GithubRepo.FullName, + Workspace: "default", + ProjectName: *fixtures.Project.Name, + }, + } + + // Create a new record for pull + _, err = db.UpdatePullWithResults(fixtures.Pull, result) + Ok(t, err) + + workingDir := mocks.NewMockWorkingDir() + locker := lockmocks.NewMockLocker() + client := vcsmocks.NewMockClient() + logger := loggermocks.NewMockSimpleLogging() + + pullClosedExecutor := events.PullClosedExecutor{ + Locker: locker, + WorkingDir: workingDir, + DB: db, + VCSClient: client, + PullClosedTemplate: &events.PullClosedEventTemplate{}, + LogStreamResourceCleaner: prjCmdOutHandler, + Logger: logger, + } + + locks := []models.ProjectLock{ + { + Project: models.NewProject(fixtures.GithubRepo.FullName, ""), + Workspace: "default", + }, + } + When(locker.UnlockByPull(fixtures.GithubRepo.FullName, fixtures.Pull.Num)).ThenReturn(locks, nil) + + // Clean up. + err = pullClosedExecutor.CleanUpPull(fixtures.GithubRepo, fixtures.Pull) + Ok(t, err) + + close(prjCmdOutput) + _, _, comment, _ := client.VerifyWasCalledOnce().CreateComment(matchers.AnyModelsRepo(), AnyInt(), AnyString(), AnyString()).GetCapturedArguments() + expectedComment := "Locks and plans deleted for the projects and workspaces modified in this pull request:\n\n" + "- dir: `.` workspace: `default`" + Equals(t, expectedComment, comment) + + // Assert log streaming resources are cleaned up. + dfPrjCmdOutputHandler := prjCmdOutHandler.(*handlers.AsyncProjectCommandOutputHandler) + assert.Empty(t, dfPrjCmdOutputHandler.GetProjectOutputBuffer(ctx.PullInfo())) + assert.Empty(t, dfPrjCmdOutputHandler.GetReceiverBufferForPull(ctx.PullInfo())) + }) +} diff --git a/server/events/team_allowlist_checker.go b/server/events/team_allowlist_checker.go new file mode 100644 index 0000000000..5d0cc49f03 --- /dev/null +++ b/server/events/team_allowlist_checker.go @@ -0,0 +1,69 @@ +package events + +import ( + "strings" +) + +// Wildcard matches all teams and all commands +const wildcard = "*" + +// mapOfStrings is an alias for map[string]string +type mapOfStrings map[string]string + +// TeamAllowlistChecker implements checking the teams and the operations that the members +// of a particular team are allowed to perform +type TeamAllowlistChecker struct { + rules []mapOfStrings +} + +// NewTeamAllowlistChecker constructs a new checker +func NewTeamAllowlistChecker(allowlist string) (*TeamAllowlistChecker, error) { + var rules []mapOfStrings + pairs := strings.Split(allowlist, ",") + if pairs[0] != "" { + for _, pair := range pairs { + values := strings.Split(pair, ":") + team := strings.TrimSpace(values[0]) + command := strings.TrimSpace(values[1]) + m := mapOfStrings{team: command} + rules = append(rules, m) + } + } + return &TeamAllowlistChecker{ + rules: rules, + }, nil +} + +// IsCommandAllowedForTeam returns true if the team is allowed to execute the command +// and false otherwise. 
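+// For example, with an allowlist of "bob:plan, dave:apply" (the same cases exercised
+// in team_allowlist_checker_test.go below), a quick sketch of the expected behaviour:
+//
+//	checker, _ := NewTeamAllowlistChecker("bob:plan, dave:apply")
+//	checker.IsCommandAllowedForTeam("bob", "plan")   // true
+//	checker.IsCommandAllowedForTeam("dave", "apply") // true
+//	checker.IsCommandAllowedForTeam("bob", "apply")  // false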
+func (checker *TeamAllowlistChecker) IsCommandAllowedForTeam(team string, command string) bool { + for _, rule := range checker.rules { + for key, value := range rule { + if (key == wildcard || strings.EqualFold(key, team)) && (value == wildcard || strings.EqualFold(value, command)) { + return true + } + } + } + return false +} + +// IsCommandAllowedForAnyTeam returns true if any of the teams is allowed to execute the command +// and false otherwise. +func (checker *TeamAllowlistChecker) IsCommandAllowedForAnyTeam(teams []string, command string) bool { + if len(teams) == 0 { + for _, rule := range checker.rules { + for key, value := range rule { + if (key == wildcard) && (value == wildcard || strings.EqualFold(value, command)) { + return true + } + } + } + } else { + for _, t := range teams { + if checker.IsCommandAllowedForTeam(t, command) { + return true + } + } + } + return false +} diff --git a/server/events/team_allowlist_checker_test.go b/server/events/team_allowlist_checker_test.go new file mode 100644 index 0000000000..59087cf141 --- /dev/null +++ b/server/events/team_allowlist_checker_test.go @@ -0,0 +1,36 @@ +package events_test + +import ( + "testing" + + "github.com/runatlantis/atlantis/server/events" + . "github.com/runatlantis/atlantis/testing" +) + +func TestNewTeamAllowListChecker(t *testing.T) { + allowlist := `bob:plan, dave:apply` + _, err := events.NewTeamAllowlistChecker(allowlist) + Ok(t, err) +} + +func TestIsCommandAllowedForTeam(t *testing.T) { + allowlist := `bob:plan, dave:apply, connie:plan, connie:apply` + checker, err := events.NewTeamAllowlistChecker(allowlist) + Ok(t, err) + Equals(t, true, checker.IsCommandAllowedForTeam("connie", "plan")) + Equals(t, true, checker.IsCommandAllowedForTeam("connie", "apply")) + Equals(t, true, checker.IsCommandAllowedForTeam("dave", "apply")) + Equals(t, true, checker.IsCommandAllowedForTeam("bob", "plan")) + Equals(t, false, checker.IsCommandAllowedForTeam("bob", "apply")) +} + +func TestIsCommandAllowedForAnyTeam(t *testing.T) { + allowlist := `alpha:plan,beta:release,*:unlock,nobody:*` + teams := []string{`alpha`, `beta`} + checker, err := events.NewTeamAllowlistChecker(allowlist) + Ok(t, err) + Equals(t, true, checker.IsCommandAllowedForAnyTeam(teams, `plan`)) + Equals(t, true, checker.IsCommandAllowedForAnyTeam(teams, `release`)) + Equals(t, true, checker.IsCommandAllowedForAnyTeam(teams, `unlock`)) + Equals(t, false, checker.IsCommandAllowedForAnyTeam(teams, `noop`)) +} diff --git a/server/events/terraform/ansi/strip.go b/server/events/terraform/ansi/strip.go new file mode 100644 index 0000000000..fa8265de2b --- /dev/null +++ b/server/events/terraform/ansi/strip.go @@ -0,0 +1,13 @@ +package ansi + +import ( + "regexp" +) + +const ansi = "[\u001B\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[a-zA-Z\\d]*)*)?\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PRZcf-ntqry=><~]))" + +var re = regexp.MustCompile(ansi) + +func Strip(str string) string { + return re.ReplaceAllString(str, "") +} diff --git a/server/events/vcs/azuredevops_client.go b/server/events/vcs/azuredevops_client.go index 2989544ab7..6d0b4469dd 100644 --- a/server/events/vcs/azuredevops_client.go +++ b/server/events/vcs/azuredevops_client.go @@ -136,7 +136,7 @@ func (g *AzureDevopsClient) HidePrevCommandComments(repo models.Repo, pullNum in // PullIsApproved returns true if the merge request was approved by another reviewer. 
// https://docs.microsoft.com/en-us/azure/devops/repos/git/branch-policies?view=azure-devops#require-a-minimum-number-of-reviewers -func (g *AzureDevopsClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (bool, error) { +func (g *AzureDevopsClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (approvalStatus models.ApprovalStatus, err error) { owner, project, repoName := SplitAzureDevopsRepoFullName(repo.FullName) opts := azuredevops.PullRequestGetOptions{ @@ -144,7 +144,7 @@ func (g *AzureDevopsClient) PullIsApproved(repo models.Repo, pull models.PullReq } adPull, _, err := g.Client.PullRequests.GetWithRepo(g.ctx, owner, project, repoName, pull.Num, &opts) if err != nil { - return false, errors.Wrap(err, "getting pull request") + return approvalStatus, errors.Wrap(err, "getting pull request") } for _, review := range adPull.Reviewers { @@ -157,11 +157,13 @@ func (g *AzureDevopsClient) PullIsApproved(repo models.Repo, pull models.PullReq } if review.GetVote() == azuredevops.VoteApproved || review.GetVote() == azuredevops.VoteApprovedWithSuggestions { - return true, nil + return models.ApprovalStatus{ + IsApproved: true, + }, nil } } - return false, nil + return approvalStatus, nil } // PullIsMergeable returns true if the merge request can be merged. @@ -377,6 +379,11 @@ func SplitAzureDevopsRepoFullName(repoFullName string) (owner string, project st return repoFullName[:lastSlashIdx], "", repoFullName[lastSlashIdx+1:] } +// GetTeamNamesForUser returns the names of the teams or groups that the user belongs to (in the organization the repository belongs to). +func (g *AzureDevopsClient) GetTeamNamesForUser(repo models.Repo, user models.User) ([]string, error) { + return nil, nil +} + func (g *AzureDevopsClient) SupportsSingleFileDownload(repo models.Repo) bool { return false } diff --git a/server/events/vcs/azuredevops_client_test.go b/server/events/vcs/azuredevops_client_test.go index 0b30a77526..a9314fd036 100644 --- a/server/events/vcs/azuredevops_client_test.go +++ b/server/events/vcs/azuredevops_client_test.go @@ -3,10 +3,11 @@ package vcs_test import ( "context" "fmt" - "io/ioutil" + "io" "net/http" "net/http/httptest" "net/url" + "os" "strings" "testing" @@ -191,7 +192,7 @@ func TestAzureDevopsClient_UpdateStatus(t *testing.T) { case "/owner/project/_apis/git/repositories/repo/pullrequests/22/statuses?api-version=5.1-preview.1": gotRequest = true defer r.Body.Close() // nolint: errcheck - body, err := ioutil.ReadAll(r.Body) + body, err := io.ReadAll(r.Body) Ok(t, err) exp := fmt.Sprintf(partResponse, c.expState) if c.supportsIterations == true { @@ -361,10 +362,10 @@ func TestAzureDevopsClient_PullIsMergeable(t *testing.T) { }, } - jsonPullRequestBytes, err := ioutil.ReadFile("fixtures/azuredevops-pr.json") + jsonPullRequestBytes, err := os.ReadFile("fixtures/azuredevops-pr.json") Ok(t, err) - jsonPolicyEvaluationBytes, err := ioutil.ReadFile("fixtures/azuredevops-policyevaluations.json") + jsonPolicyEvaluationBytes, err := os.ReadFile("fixtures/azuredevops-policyevaluations.json") Ok(t, err) pullRequestBody := string(jsonPullRequestBytes) @@ -465,7 +466,7 @@ func TestAzureDevopsClient_PullIsApproved(t *testing.T) { }, } - jsBytes, err := ioutil.ReadFile("fixtures/azuredevops-pr.json") + jsBytes, err := os.ReadFile("fixtures/azuredevops-pr.json") Ok(t, err) json := string(jsBytes) @@ -495,7 +496,7 @@ func TestAzureDevopsClient_PullIsApproved(t *testing.T) { defer disableSSLVerification()() - actApproved, err := client.PullIsApproved(models.Repo{ + 
approvalStatus, err := client.PullIsApproved(models.Repo{ FullName: "owner/project/repo", Owner: "owner", Name: "repo", @@ -509,14 +510,14 @@ func TestAzureDevopsClient_PullIsApproved(t *testing.T) { Num: 1, }) Ok(t, err) - Equals(t, c.expApproved, actApproved) + Equals(t, c.expApproved, approvalStatus.IsApproved) }) } } func TestAzureDevopsClient_GetPullRequest(t *testing.T) { // Use a real Azure DevOps json response and edit the mergeable_state field. - jsBytes, err := ioutil.ReadFile("fixtures/azuredevops-pr.json") + jsBytes, err := os.ReadFile("fixtures/azuredevops-pr.json") Ok(t, err) response := string(jsBytes) diff --git a/server/events/vcs/bitbucketcloud/client.go b/server/events/vcs/bitbucketcloud/client.go index 9ba1ca41ee..b926c8cf4a 100644 --- a/server/events/vcs/bitbucketcloud/client.go +++ b/server/events/vcs/bitbucketcloud/client.go @@ -5,7 +5,6 @@ import ( "encoding/json" "fmt" "io" - "io/ioutil" "net/http" "github.com/pkg/errors" @@ -105,28 +104,30 @@ func (b *Client) HidePrevCommandComments(repo models.Repo, pullNum int, command } // PullIsApproved returns true if the merge request was approved. -func (b *Client) PullIsApproved(repo models.Repo, pull models.PullRequest) (bool, error) { +func (b *Client) PullIsApproved(repo models.Repo, pull models.PullRequest) (approvalStatus models.ApprovalStatus, err error) { path := fmt.Sprintf("%s/2.0/repositories/%s/pullrequests/%d", b.BaseURL, repo.FullName, pull.Num) resp, err := b.makeRequest("GET", path, nil) if err != nil { - return false, err + return approvalStatus, err } var pullResp PullRequest if err := json.Unmarshal(resp, &pullResp); err != nil { - return false, errors.Wrapf(err, "Could not parse response %q", string(resp)) + return approvalStatus, errors.Wrapf(err, "Could not parse response %q", string(resp)) } if err := validator.New().Struct(pullResp); err != nil { - return false, errors.Wrapf(err, "API response %q was missing fields", string(resp)) + return approvalStatus, errors.Wrapf(err, "API response %q was missing fields", string(resp)) } authorUUID := *pullResp.Author.UUID for _, participant := range pullResp.Participants { // Bitbucket allows the author to approve their own pull request. This // defeats the purpose of approvals so we don't count that approval. if *participant.Approved && *participant.User.UUID != authorUUID { - return true, nil + return models.ApprovalStatus{ + IsApproved: true, + }, nil } } - return false, nil + return approvalStatus, nil } // PullIsMergeable returns true if the merge request has no conflicts and can be merged. @@ -234,16 +235,21 @@ func (b *Client) makeRequest(method string, path string, reqBody io.Reader) ([]b requestStr := fmt.Sprintf("%s %s", method, path) if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusCreated { - respBody, _ := ioutil.ReadAll(resp.Body) + respBody, _ := io.ReadAll(resp.Body) return nil, fmt.Errorf("making request %q unexpected status code: %d, body: %s", requestStr, resp.StatusCode, string(respBody)) } - respBody, err := ioutil.ReadAll(resp.Body) + respBody, err := io.ReadAll(resp.Body) if err != nil { return nil, errors.Wrapf(err, "reading response from request %q", requestStr) } return respBody, nil } +// GetTeamNamesForUser returns the names of the teams or groups that the user belongs to (in the organization the repository belongs to). 
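+// Team lookups are only implemented for GitHub in this change; this Bitbucket Cloud
+// stub just satisfies the vcs.Client interface and always reports no teams.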
+func (b *Client) GetTeamNamesForUser(repo models.Repo, user models.User) ([]string, error) { + return nil, nil +} + func (b *Client) SupportsSingleFileDownload(models.Repo) bool { return false } diff --git a/server/events/vcs/bitbucketcloud/client_test.go b/server/events/vcs/bitbucketcloud/client_test.go index 0575e9e8ee..54e5179be8 100644 --- a/server/events/vcs/bitbucketcloud/client_test.go +++ b/server/events/vcs/bitbucketcloud/client_test.go @@ -2,9 +2,9 @@ package bitbucketcloud_test import ( "fmt" - "io/ioutil" "net/http" "net/http/httptest" + "os" "path/filepath" "testing" @@ -181,7 +181,7 @@ func TestClient_PullIsApproved(t *testing.T) { for _, c := range cases { t.Run(c.description, func(t *testing.T) { - json, err := ioutil.ReadFile(filepath.Join("testdata", c.testdata)) + json, err := os.ReadFile(filepath.Join("testdata", c.testdata)) Ok(t, err) testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch r.RequestURI { @@ -202,14 +202,14 @@ func TestClient_PullIsApproved(t *testing.T) { repo, err := models.NewRepo(models.BitbucketServer, "owner/repo", "https://bitbucket.org/owner/repo.git", "user", "token") Ok(t, err) - approved, err := client.PullIsApproved(repo, models.PullRequest{ + approvalStatus, err := client.PullIsApproved(repo, models.PullRequest{ Num: 1, HeadBranch: "branch", Author: "author", BaseRepo: repo, }) Ok(t, err) - Equals(t, c.exp, approved) + Equals(t, c.exp, approvalStatus.IsApproved) }) } } diff --git a/server/events/vcs/bitbucketserver/client.go b/server/events/vcs/bitbucketserver/client.go index 27af85354d..e0041e1d3e 100644 --- a/server/events/vcs/bitbucketserver/client.go +++ b/server/events/vcs/bitbucketserver/client.go @@ -5,7 +5,6 @@ import ( "encoding/json" "fmt" "io" - "io/ioutil" "net/http" "net/url" "regexp" @@ -30,6 +29,11 @@ type Client struct { AtlantisURL string } +type DeleteSourceBranch struct { + Name string `json:"name"` + DryRun bool `json:"dryRun"` +} + // NewClient builds a bitbucket cloud client. Returns an error if the baseURL is // malformed. httpClient is the client to use to make the requests, username // and password are used as basic auth in the requests, baseURL is the API's @@ -161,29 +165,31 @@ func (b *Client) postComment(repo models.Repo, pullNum int, comment string) erro } // PullIsApproved returns true if the merge request was approved. 
-func (b *Client) PullIsApproved(repo models.Repo, pull models.PullRequest) (bool, error) { +func (b *Client) PullIsApproved(repo models.Repo, pull models.PullRequest) (approvalStatus models.ApprovalStatus, err error) { projectKey, err := b.GetProjectKey(repo.Name, repo.SanitizedCloneURL) if err != nil { - return false, err + return approvalStatus, err } path := fmt.Sprintf("%s/rest/api/1.0/projects/%s/repos/%s/pull-requests/%d", b.BaseURL, projectKey, repo.Name, pull.Num) resp, err := b.makeRequest("GET", path, nil) if err != nil { - return false, err + return approvalStatus, err } var pullResp PullRequest if err := json.Unmarshal(resp, &pullResp); err != nil { - return false, errors.Wrapf(err, "Could not parse response %q", string(resp)) + return approvalStatus, errors.Wrapf(err, "Could not parse response %q", string(resp)) } if err := validator.New().Struct(pullResp); err != nil { - return false, errors.Wrapf(err, "API response %q was missing fields", string(resp)) + return approvalStatus, errors.Wrapf(err, "API response %q was missing fields", string(resp)) } for _, reviewer := range pullResp.Reviewers { if *reviewer.Approved { - return true, nil + return models.ApprovalStatus{ + IsApproved: true, + }, nil } } - return false, nil + return approvalStatus, nil } // PullIsMergeable returns true if the merge request has no conflicts and can be merged. @@ -265,6 +271,21 @@ func (b *Client) MergePull(pull models.PullRequest, pullOptions models.PullReque } path = fmt.Sprintf("%s/rest/api/1.0/projects/%s/repos/%s/pull-requests/%d/merge?version=%d", b.BaseURL, projectKey, pull.BaseRepo.Name, pull.Num, *pullResp.Version) _, err = b.makeRequest("POST", path, nil) + if err != nil { + return err + } + if pullOptions.DeleteSourceBranchOnMerge { + bodyBytes, err := json.Marshal(DeleteSourceBranch{Name: "refs/heads/" + pull.HeadBranch, DryRun: false}) + if err != nil { + return errors.Wrap(err, "json encoding") + } + + path = fmt.Sprintf("%s/rest/branch-utils/1.0/projects/%s/repos/%s/branches", b.BaseURL, projectKey, pull.BaseRepo.Name) + _, err = b.makeRequest("DELETE", path, bytes.NewBuffer(bodyBytes)) + if err != nil { + return err + } + } return err } @@ -302,16 +323,21 @@ func (b *Client) makeRequest(method string, path string, reqBody io.Reader) ([]b requestStr := fmt.Sprintf("%s %s", method, path) if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusCreated && resp.StatusCode != 204 { - respBody, _ := ioutil.ReadAll(resp.Body) + respBody, _ := io.ReadAll(resp.Body) return nil, fmt.Errorf("making request %q unexpected status code: %d, body: %s", requestStr, resp.StatusCode, string(respBody)) } - respBody, err := ioutil.ReadAll(resp.Body) + respBody, err := io.ReadAll(resp.Body) if err != nil { return nil, errors.Wrapf(err, "reading response from request %q", requestStr) } return respBody, nil } +// GetTeamNamesForUser returns the names of the teams or groups that the user belongs to (in the organization the repository belongs to). 
+func (b *Client) GetTeamNamesForUser(repo models.Repo, user models.User) ([]string, error) { + return nil, nil +} + func (b *Client) SupportsSingleFileDownload(repo models.Repo) bool { return false } diff --git a/server/events/vcs/bitbucketserver/client_test.go b/server/events/vcs/bitbucketserver/client_test.go index 016db6a344..73aa8b0962 100644 --- a/server/events/vcs/bitbucketserver/client_test.go +++ b/server/events/vcs/bitbucketserver/client_test.go @@ -1,10 +1,12 @@ package bitbucketserver_test import ( + "encoding/json" "fmt" - "io/ioutil" + "io" "net/http" "net/http/httptest" + "os" "path/filepath" "strings" "testing" @@ -137,7 +139,7 @@ func TestClient_GetModifiedFilesPagination(t *testing.T) { // Test that we use the correct version parameter in our call to merge the pull // request. func TestClient_MergePull(t *testing.T) { - pullRequest, err := ioutil.ReadFile(filepath.Join("testdata", "pull-request.json")) + pullRequest, err := os.ReadFile(filepath.Join("testdata", "pull-request.json")) Ok(t, err) testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch r.RequestURI { @@ -183,6 +185,65 @@ func TestClient_MergePull(t *testing.T) { Ok(t, err) } +// Test that we delete the source branch in our call to merge the pull +// request. +func TestClient_MergePullDeleteSourceBranch(t *testing.T) { + pullRequest, err := os.ReadFile(filepath.Join("testdata", "pull-request.json")) + Ok(t, err) + testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.RequestURI { + // The first request should hit this URL. + case "/rest/api/1.0/projects/ow/repos/repo/pull-requests/1": + w.Write(pullRequest) // nolint: errcheck + return + case "/rest/api/1.0/projects/ow/repos/repo/pull-requests/1/merge?version=3": + Equals(t, "POST", r.Method) + w.Write(pullRequest) // nolint: errcheck + case "/rest/branch-utils/1.0/projects/ow/repos/repo/branches": + Equals(t, "DELETE", r.Method) + defer r.Body.Close() + b, err := io.ReadAll(r.Body) + Ok(t, err) + var payload bitbucketserver.DeleteSourceBranch + err = json.Unmarshal(b, &payload) + Ok(t, err) + Equals(t, "refs/heads/foo", payload.Name) + w.WriteHeader(http.StatusNoContent) // nolint: errcheck + default: + t.Errorf("got unexpected request at %q", r.RequestURI) + http.Error(w, "not found", http.StatusNotFound) + return + } + })) + defer testServer.Close() + + client, err := bitbucketserver.NewClient(http.DefaultClient, "user", "pass", testServer.URL, "runatlantis.io") + Ok(t, err) + + err = client.MergePull(models.PullRequest{ + Num: 1, + HeadCommit: "", + URL: "", + HeadBranch: "foo", + BaseBranch: "", + Author: "", + State: 0, + BaseRepo: models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + SanitizedCloneURL: fmt.Sprintf("%s/scm/ow/repo.git", testServer.URL), + VCSHost: models.VCSHost{ + Type: models.BitbucketServer, + Hostname: "bitbucket.org", + }, + }, + }, models.PullRequestOptions{ + DeleteSourceBranchOnMerge: true, + }) + Ok(t, err) +} + func TestClient_MarkdownPullLink(t *testing.T) { client, err := bitbucketserver.NewClient(nil, "u", "p", "https://base-url", "atlantis-url") Ok(t, err) diff --git a/server/events/vcs/bitbucketserver/models.go b/server/events/vcs/bitbucketserver/models.go index f9c34d4fc9..5646ca4256 100644 --- a/server/events/vcs/bitbucketserver/models.go +++ b/server/events/vcs/bitbucketserver/models.go @@ -3,6 +3,7 @@ package bitbucketserver const ( DiagnosticsPingHeader = "diagnostics:ping" PullCreatedHeader = 
"pr:opened" + PullFromRefUpdatedHeader = "pr:from_ref_updated" PullMergedHeader = "pr:merged" PullDeclinedHeader = "pr:declined" PullDeletedHeader = "pr:deleted" diff --git a/server/events/vcs/client.go b/server/events/vcs/client.go index 0f49cc89de..e63fdcd1f7 100644 --- a/server/events/vcs/client.go +++ b/server/events/vcs/client.go @@ -26,7 +26,7 @@ type Client interface { GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) CreateComment(repo models.Repo, pullNum int, comment string, command string) error HidePrevCommandComments(repo models.Repo, pullNum int, command string) error - PullIsApproved(repo models.Repo, pull models.PullRequest) (bool, error) + PullIsApproved(repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) PullIsMergeable(repo models.Repo, pull models.PullRequest) (bool, error) // UpdateStatus updates the commit status to state for pull. src is the // source of this status. This should be relatively static across runs, @@ -38,6 +38,7 @@ type Client interface { UpdateStatus(repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error MergePull(pull models.PullRequest, pullOptions models.PullRequestOptions) error MarkdownPullLink(pull models.PullRequest) (string, error) + GetTeamNamesForUser(repo models.Repo, user models.User) ([]string, error) // DownloadRepoConfigFile return `atlantis.yaml` content from VCS (which support fetch a single file from repository) // The first return value indicate that repo contain atlantis.yaml or not diff --git a/server/events/vcs/fixtures/fixtures.go b/server/events/vcs/fixtures/fixtures.go index 5bd935ba4a..5d02277f06 100644 --- a/server/events/vcs/fixtures/fixtures.go +++ b/server/events/vcs/fixtures/fixtures.go @@ -21,7 +21,7 @@ import ( "strings" "testing" - "github.com/dgrijalva/jwt-go" + "github.com/golang-jwt/jwt/v4" "github.com/google/go-github/v31/github" "github.com/mcdafydd/go-azuredevops/azuredevops" ) @@ -227,6 +227,172 @@ var ADPullJSON = `{ "artifactId": "vstfs:///Git/PullRequestId/a7573007-bbb3-4341-b726-0c4148a07853%2f3411ebc1-d5aa-464f-9615-0b527bc66719%2f22" }` +var ADSelfPullEvent = azuredevops.Event{ + EventType: "git.pullrequest.created", + Resource: &ADSelfPull, +} + +var ADSelfPullUpdatedEvent = azuredevops.Event{ + EventType: "git.pullrequest.updated", + Resource: &ADSelfPull, +} + +var ADSelfPullClosedEvent = azuredevops.Event{ + EventType: "git.pullrequest.merged", + Resource: &ADSelfPullCompleted, +} + +var ADSelfPull = azuredevops.GitPullRequest{ + CreatedBy: &azuredevops.IdentityRef{ + ID: azuredevops.String("d6245f20-2af8-44f4-9451-8107cb2767db"), + DisplayName: azuredevops.String("User"), + UniqueName: azuredevops.String("user@example.com"), + }, + LastMergeSourceCommit: &azuredevops.GitCommitRef{ + CommitID: azuredevops.String("b60280bc6e62e2f880f1b63c1e24987664d3bda3"), + URL: azuredevops.String("https://devops.abc.com/owner/project/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719/commits/b60280bc6e62e2f880f1b63c1e24987664d3bda3"), + }, + PullRequestID: azuredevops.Int(1), + Repository: &ADSelfRepo, + SourceRefName: azuredevops.String("refs/heads/feature/sourceBranch"), + Status: azuredevops.String("active"), + TargetRefName: azuredevops.String("refs/heads/targetBranch"), + URL: azuredevops.String("https://devops.abc.com/owner/project/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719/pullRequests/21"), +} + +var ADSelfPullCompleted = azuredevops.GitPullRequest{ + CreatedBy: 
&azuredevops.IdentityRef{ + ID: azuredevops.String("d6245f20-2af8-44f4-9451-8107cb2767db"), + DisplayName: azuredevops.String("User"), + UniqueName: azuredevops.String("user@example.com"), + }, + LastMergeSourceCommit: &azuredevops.GitCommitRef{ + CommitID: azuredevops.String("b60280bc6e62e2f880f1b63c1e24987664d3bda3"), + URL: azuredevops.String("https://https://devops.abc.com/owner/project/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719/commits/b60280bc6e62e2f880f1b63c1e24987664d3bda3"), + }, + PullRequestID: azuredevops.Int(1), + Repository: &ADSelfRepo, + SourceRefName: azuredevops.String("refs/heads/owner/sourceBranch"), + Status: azuredevops.String("completed"), + TargetRefName: azuredevops.String("refs/heads/targetBranch"), + URL: azuredevops.String("https://devops.abc.com/owner/project/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719/pullRequests/21"), +} + +var ADSelfRepo = azuredevops.GitRepository{ + DefaultBranch: azuredevops.String("refs/heads/master"), + Name: azuredevops.String("repo"), + ParentRepository: &azuredevops.GitRepositoryRef{ + Name: azuredevops.String("owner"), + }, + Project: &azuredevops.TeamProjectReference{ + ID: azuredevops.String("a21f5f20-4a12-aaf4-ab12-9a0927cbbb90"), + Name: azuredevops.String("project"), + State: azuredevops.String("unchanged"), + }, + WebURL: azuredevops.String("https://devops.abc.com/owner/project/_git/repo"), +} + +var ADSelfPullJSON = `{ + "repository": { + "id": "3411ebc1-d5aa-464f-9615-0b527bc66719", + "name": "repo", + "url": "https://devops.abc.com/owner/project/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719", + "webUrl": "https://devops.abc.com/owner/project/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719", + "project": { + "id": "a7573007-bbb3-4341-b726-0c4148a07853", + "name": "project", + "description": "test project created on Halloween 2016", + "url": "https://dev.azure.com/owner/_apis/projects/a7573007-bbb3-4341-b726-0c4148a07853", + "state": "wellFormed", + "revision": 7 + }, + "remoteUrl": "https://devops.abc.com/owner/project/_git/repo" + }, + "pullRequestId": 22, + "codeReviewId": 22, + "status": "active", + "createdBy": { + "id": "d6245f20-2af8-44f4-9451-8107cb2767db", + "displayName": "Normal Paulk", + "uniqueName": "fabrikamfiber16@hotmail.com", + "url": "https://dev.azure.com/owner/_apis/Identities/d6245f20-2af8-44f4-9451-8107cb2767db", + "imageUrl": "https://dev.azure.com/owner/_api/_common/identityImage?id=d6245f20-2af8-44f4-9451-8107cb2767db" + }, + "creationDate": "2016-11-01T16:30:31.6655471Z", + "title": "A new feature", + "description": "Adding a new feature", + "sourceRefName": "refs/heads/npaulk/my_work", + "targetRefName": "refs/heads/new_feature", + "mergeStatus": "succeeded", + "mergeId": "f5fc8381-3fb2-49fe-8a0d-27dcc2d6ef82", + "lastMergeSourceCommit": { + "commitId": "b60280bc6e62e2f880f1b63c1e24987664d3bda3", + "url": "https://dev.azure.com/owner/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719/commits/b60280bc6e62e2f880f1b63c1e24987664d3bda3" + }, + "lastMergeTargetCommit": { + "commitId": "f47bbc106853afe3c1b07a81754bce5f4b8dbf62", + "url": "https://dev.azure.com/owner/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719/commits/f47bbc106853afe3c1b07a81754bce5f4b8dbf62" + }, + "lastMergeCommit": { + "commitId": "39f52d24533cc712fc845ed9fd1b6c06b3942588", + "author": { + "name": "Normal Paulk", + "email": "fabrikamfiber16@hotmail.com", + "date": "2016-11-01T16:30:32Z" + }, + "committer": { + "name": "Normal Paulk", + "email": 
"fabrikamfiber16@hotmail.com", + "date": "2016-11-01T16:30:32Z" + }, + "comment": "Merge pull request 22 from npaulk/my_work into new_feature", + "url": "https://dev.azure.com/owner/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719/commits/39f52d24533cc712fc845ed9fd1b6c06b3942588" + }, + "reviewers": [ + { + "reviewerUrl": "https://dev.azure.com/owner/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719/pullRequests/22/reviewers/d6245f20-2af8-44f4-9451-8107cb2767db", + "vote": 0, + "id": "d6245f20-2af8-44f4-9451-8107cb2767db", + "displayName": "Normal Paulk", + "uniqueName": "fabrikamfiber16@hotmail.com", + "url": "https://dev.azure.com/owner/_apis/Identities/d6245f20-2af8-44f4-9451-8107cb2767db", + "imageUrl": "https://dev.azure.com/owner/_api/_common/identityImage?id=d6245f20-2af8-44f4-9451-8107cb2767db" + } + ], + "url": "https://dev.azure.com/owner/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719/pullRequests/22", + "_links": { + "self": { + "href": "https://dev.azure.com/owner/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719/pullRequests/22" + }, + "repository": { + "href": "https://dev.azure.com/owner/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719" + }, + "workItems": { + "href": "https://dev.azure.com/owner/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719/pullRequests/22/workitems" + }, + "sourceBranch": { + "href": "https://dev.azure.com/owner/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719/refs" + }, + "targetBranch": { + "href": "https://dev.azure.com/owner/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719/refs" + }, + "sourceCommit": { + "href": "https://dev.azure.com/owner/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719/commits/b60280bc6e62e2f880f1b63c1e24987664d3bda3" + }, + "targetCommit": { + "href": "https://dev.azure.com/owner/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719/commits/f47bbc106853afe3c1b07a81754bce5f4b8dbf62" + }, + "createdBy": { + "href": "https://dev.azure.com/owner/_apis/Identities/d6245f20-2af8-44f4-9451-8107cb2767db" + }, + "iterations": { + "href": "https://dev.azure.com/owner/_apis/git/repositories/3411ebc1-d5aa-464f-9615-0b527bc66719/pullRequests/22/iterations" + } + }, + "supportsIterations": true, + "artifactId": "vstfs:///Git/PullRequestId/a7573007-bbb3-4341-b726-0c4148a07853%2f3411ebc1-d5aa-464f-9615-0b527bc66719%2f22" +}` + const GithubPrivateKey = `-----BEGIN RSA PRIVATE KEY----- MIIEowIBAAKCAQEAuEPzOUE+kiEH1WLiMeBytTEF856j0hOVcSUSUkZxKvqczkWM 9vo1gDyC7ZXhdH9fKh32aapba3RSsp4ke+giSmYTk2mGR538ShSDxh0OgpJmjiKP diff --git a/server/events/vcs/github_client.go b/server/events/vcs/github_client.go index 6079fe408c..96f7877d6c 100644 --- a/server/events/vcs/github_client.go +++ b/server/events/vcs/github_client.go @@ -33,15 +33,18 @@ import ( // maxCommentLength is the maximum number of chars allowed in a single comment // by GitHub. -const maxCommentLength = 65536 +const ( + maxCommentLength = 65536 +) // GithubClient is used to perform GitHub actions. type GithubClient struct { - user string - client *github.Client - v4MutateClient *graphql.Client - ctx context.Context - logger logging.SimpleLogging + user string + client *github.Client + v4MutateClient *graphql.Client + ctx context.Context + logger logging.SimpleLogging + statusTitleMatcher StatusTitleMatcher } // GithubAppTemporarySecrets holds app credentials obtained from github after creation. 
@@ -59,7 +62,7 @@ type GithubAppTemporarySecrets struct { } // NewGithubClient returns a valid GitHub client. -func NewGithubClient(hostname string, credentials GithubCredentials, logger logging.SimpleLogging) (*GithubClient, error) { +func NewGithubClient(hostname string, credentials GithubCredentials, logger logging.SimpleLogging, commitStatusPrefix string) (*GithubClient, error) { transport, err := credentials.Client() if err != nil { return nil, errors.Wrap(err, "error initializing github authentication transport") @@ -99,11 +102,12 @@ func NewGithubClient(hostname string, credentials GithubCredentials, logger logg return nil, errors.Wrap(err, "getting user") } return &GithubClient{ - user: user, - client: client, - v4MutateClient: v4MutateClient, - ctx: context.Background(), - logger: logger, + user: user, + client: client, + v4MutateClient: v4MutateClient, + ctx: context.Background(), + logger: logger, + statusTitleMatcher: StatusTitleMatcher{TitlePrefix: commitStatusPrefix}, }, nil } @@ -232,7 +236,7 @@ func (g *GithubClient) HidePrevCommandComments(repo models.Repo, pullNum int, co } // PullIsApproved returns true if the pull request was approved. -func (g *GithubClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (bool, error) { +func (g *GithubClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (approvalStatus models.ApprovalStatus, err error) { nextPage := 0 for { opts := github.ListOptions{ @@ -244,11 +248,15 @@ func (g *GithubClient) PullIsApproved(repo models.Repo, pull models.PullRequest) g.logger.Debug("GET /repos/%v/%v/pulls/%d/reviews", repo.Owner, repo.Name, pull.Num) pageReviews, resp, err := g.client.PullRequests.ListReviews(g.ctx, repo.Owner, repo.Name, pull.Num, &opts) if err != nil { - return false, errors.Wrap(err, "getting reviews") + return approvalStatus, errors.Wrap(err, "getting reviews") } for _, review := range pageReviews { if review != nil && review.GetState() == "APPROVED" { - return true, nil + return models.ApprovalStatus{ + IsApproved: true, + ApprovedBy: *review.User.Login, + Date: *review.SubmittedAt, + }, nil } } if resp.NextPage == 0 { @@ -256,7 +264,7 @@ func (g *GithubClient) PullIsApproved(repo models.Repo, pull models.PullRequest) } nextPage = resp.NextPage } - return false, nil + return approvalStatus, nil } // PullIsMergeable returns true if the pull request is mergeable. @@ -276,8 +284,40 @@ func (g *GithubClient) PullIsMergeable(repo models.Repo, pull models.PullRequest // hooks. Merging is allowed (green box). // See: https://github.com/octokit/octokit.net/issues/1763 if state != "clean" && state != "unstable" && state != "has_hooks" { + + if state != "blocked" { + return false, nil + } + + return g.getSupplementalMergeability(repo, pull) + } + return true, nil +} + +// Checks to make sure that all statuses are passing except the atlantis/apply. If we only rely on GetMergeableState, +// we can run into issues where if an apply failed, we can never apply again due to mergeability failures. 
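+// For example (mirroring TestGithubClient_PullisMergeable_BlockedStatus below): a PR whose
+// only failing status is "atlantis/apply" or "atlantis/apply: <project>" is still treated
+// as mergeable, while a failing "atlantis/plan" keeps it blocked.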
+func (g *GithubClient) getSupplementalMergeability(repo models.Repo, pull models.PullRequest) (bool, error) {
+	statuses, err := g.getRepoStatuses(repo, pull)
+
+	if err != nil {
+		return false, errors.Wrapf(err, "fetching repo statuses for repo: %s, and pull number: %d", repo.FullName, pull.Num)
+	}
+
+	for _, status := range statuses {
+		state := status.GetState()
+
+		if g.statusTitleMatcher.MatchesCommand(status.GetContext(), "apply") ||
+			state == "success" {
+			continue
+
+		}
+
+		// We either have a failure or a pending status check,
+		// hence the PR is not mergeable.
 		return false, nil
 	}
+
+	// All of our status checks passed by our definition, so the PR is mergeable.
 	return true, nil
 }
@@ -288,10 +328,14 @@ func (g *GithubClient) GetPullRequest(repo models.Repo, num int) (*github.PullRe
 
 	// GitHub has started to return 404's here (#1019) even after they send the webhook.
 	// They've got some eventual consistency issues going on so we're just going
-	// to retry up to 3 times with a 1s sleep.
-	numRetries := 3
-	retryDelay := 1 * time.Second
-	for i := 0; i < numRetries; i++ {
+	// to attempt up to 5 times with exponential backoff.
+	maxAttempts := 5
+	attemptDelay := 0 * time.Second
+	for i := 0; i < maxAttempts; i++ {
+		// First don't sleep, then sleep 1, 3, 7, etc.
+		time.Sleep(attemptDelay)
+		attemptDelay = 2*attemptDelay + 1*time.Second
+
 		pull, _, err = g.client.PullRequests.Get(g.ctx, repo.Owner, repo.Name, num)
 		if err == nil {
 			return pull, nil
@@ -300,11 +344,42 @@ func (g *GithubClient) GetPullRequest(repo models.Repo, num int) (*github.PullRe
 		if !ok || ghErr.Response.StatusCode != 404 {
 			return pull, err
 		}
-		time.Sleep(retryDelay)
 	}
 	return pull, err
 }
 
+func (g *GithubClient) getRepoStatuses(repo models.Repo, pull models.PullRequest) ([]*github.RepoStatus, error) {
+	// Get combined statuses for the pull's head commit.
+
+	nextPage := 0
+
+	var result []*github.RepoStatus
+
+	for {
+		opts := github.ListOptions{
+			// explicit default
+			// https://developer.github.com/v3/repos/statuses/#list-commit-statuses-for-a-reference
+			PerPage: 100,
+		}
+		if nextPage != 0 {
+			opts.Page = nextPage
+		}
+
+		combinedStatus, response, err := g.client.Repositories.GetCombinedStatus(g.ctx, repo.Owner, repo.Name, pull.HeadCommit, &opts)
+		// Check the error before reading combinedStatus, which may be nil on failure.
+		if err != nil {
+			return nil, err
+		}
+		result = append(result, combinedStatus.Statuses...)
+
+		if response.NextPage == 0 {
+			break
+		}
+		nextPage = response.NextPage
+	}
+
+	return result, nil
+}
+
 // UpdateStatus updates the status badge on the pull request.
 // See https://github.com/blog/1227-commit-status-api.
 func (g *GithubClient) UpdateStatus(repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error {
@@ -379,6 +454,35 @@ func (g *GithubClient) MarkdownPullLink(pull models.PullRequest) (string, error)
 
 	return fmt.Sprintf("#%d", pull.Num), nil
 }
 
+// GetTeamNamesForUser returns the names of the teams or groups that the user belongs to (in the organization the repository belongs to).
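+// Note: this walks every team in the organization and checks the user's membership
+// team by team, so it can issue many API calls for organizations with a lot of teams.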
+// https://developer.github.com/v3/teams/members/#get-team-membership +func (g *GithubClient) GetTeamNamesForUser(repo models.Repo, user models.User) ([]string, error) { + var teamNames []string + opts := &github.ListOptions{} + org := repo.Owner + for { + teams, resp, err := g.client.Teams.ListTeams(g.ctx, org, opts) + if err != nil { + return nil, errors.Wrap(err, "retrieving GitHub teams") + } + for _, t := range teams { + membership, _, err := g.client.Teams.GetTeamMembershipBySlug(g.ctx, org, *t.Slug, user.Username) + if err != nil { + g.logger.Err("Failed to get team membership from GitHub: %s", err) + } else if membership != nil { + if *membership.State == "active" && (*membership.Role == "member" || *membership.Role == "maintainer") { + teamNames = append(teamNames, t.GetName()) + } + } + } + if resp.NextPage == 0 { + break + } + opts.Page = resp.NextPage + } + return teamNames, nil +} + // ExchangeCode returns a newly created app's info func (g *GithubClient) ExchangeCode(code string) (*GithubAppTemporarySecrets, error) { ctx := context.Background() diff --git a/server/events/vcs/github_client_internal_test.go b/server/events/vcs/github_client_internal_test.go index ebe039d319..70b79912ea 100644 --- a/server/events/vcs/github_client_internal_test.go +++ b/server/events/vcs/github_client_internal_test.go @@ -22,14 +22,14 @@ import ( // If the hostname is github.com, should use normal BaseURL. func TestNewGithubClient_GithubCom(t *testing.T) { - client, err := NewGithubClient("github.com", &GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t)) + client, err := NewGithubClient("github.com", &GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t), "atlantis") Ok(t, err) Equals(t, "https://github.com/gitapi/", client.client.BaseURL.String()) } // If the hostname is a non-github hostname should use the right BaseURL. func TestNewGithubClient_NonGithub(t *testing.T) { - client, err := NewGithubClient("example.com", &GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t)) + client, err := NewGithubClient("example.com", &GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t), "atlantis") Ok(t, err) Equals(t, "https://example.com/api/v3/", client.client.BaseURL.String()) // If possible in the future, test the GraphQL client's URL as well. 
But at the diff --git a/server/events/vcs/github_client_test.go b/server/events/vcs/github_client_test.go index 115902dd88..f31de0d6a9 100644 --- a/server/events/vcs/github_client_test.go +++ b/server/events/vcs/github_client_test.go @@ -4,10 +4,12 @@ import ( "crypto/tls" "encoding/json" "fmt" + "io" "io/ioutil" "net/http" "net/http/httptest" "net/url" + "os" "strings" "testing" @@ -61,7 +63,7 @@ func TestGithubClient_GetModifiedFiles(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logger) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logger, "atlantis") Ok(t, err) defer disableSSLVerification()() @@ -116,7 +118,7 @@ func TestGithubClient_GetModifiedFilesMovedFile(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t), "atlantis") Ok(t, err) defer disableSSLVerification()() @@ -169,7 +171,7 @@ func TestGithubClient_PaginatesComments(t *testing.T) { switch r.Method + " " + r.RequestURI { case "POST /api/graphql": defer r.Body.Close() // nolint: errcheck - body, err := ioutil.ReadAll(r.Body) + body, err := io.ReadAll(r.Body) if err != nil { t.Errorf("read body error: %v", err) http.Error(w, "server error", http.StatusInternalServerError) @@ -210,7 +212,7 @@ func TestGithubClient_PaginatesComments(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t), "atlantis") Ok(t, err) defer disableSSLVerification()() @@ -272,7 +274,7 @@ func TestGithubClient_HideOldComments(t *testing.T) { return } defer r.Body.Close() // nolint: errcheck - body, err := ioutil.ReadAll(r.Body) + body, err := io.ReadAll(r.Body) if err != nil { t.Errorf("read body error: %v", err) http.Error(w, "server error", http.StatusInternalServerError) @@ -299,7 +301,7 @@ func TestGithubClient_HideOldComments(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t), "atlantis") Ok(t, err) defer disableSSLVerification()() @@ -350,7 +352,7 @@ func TestGithubClient_UpdateStatus(t *testing.T) { http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch r.RequestURI { case "/api/v3/repos/owner/repo/statuses/": - body, err := ioutil.ReadAll(r.Body) + body, err := io.ReadAll(r.Body) Ok(t, err) exp := fmt.Sprintf(`{"state":"%s","target_url":"https://google.com","description":"description","context":"src"}%s`, c.expState, "\n") Equals(t, exp, string(body)) @@ -365,7 +367,7 @@ func TestGithubClient_UpdateStatus(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t)) + client, err := 
vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t), "atlantis") Ok(t, err) defer disableSSLVerification()() @@ -451,11 +453,11 @@ func TestGithubClient_PullIsApproved(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t), "atlantis") Ok(t, err) defer disableSSLVerification()() - approved, err := client.PullIsApproved(models.Repo{ + approvalStatus, err := client.PullIsApproved(models.Repo{ FullName: "owner/repo", Owner: "owner", Name: "repo", @@ -469,7 +471,7 @@ func TestGithubClient_PullIsApproved(t *testing.T) { Num: 1, }) Ok(t, err) - Equals(t, false, approved) + Equals(t, false, approvalStatus.IsApproved) } func TestGithubClient_PullIsMergeable(t *testing.T) { @@ -485,10 +487,6 @@ func TestGithubClient_PullIsMergeable(t *testing.T) { "unknown", false, }, - { - "blocked", - false, - }, { "behind", false, @@ -516,7 +514,7 @@ func TestGithubClient_PullIsMergeable(t *testing.T) { } // Use a real GitHub json response and edit the mergeable_state field. - jsBytes, err := ioutil.ReadFile("fixtures/github-pull-request.json") + jsBytes, err := os.ReadFile("fixtures/github-pull-request.json") Ok(t, err) json := string(jsBytes) @@ -542,7 +540,7 @@ func TestGithubClient_PullIsMergeable(t *testing.T) { })) testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t), "atlantis") Ok(t, err) defer disableSSLVerification()() @@ -565,6 +563,115 @@ func TestGithubClient_PullIsMergeable(t *testing.T) { } } +func TestGithubClient_PullisMergeable_BlockedStatus(t *testing.T) { + + // Use a real GitHub json response and edit the mergeable_state field. 
+ jsBytes, err := ioutil.ReadFile("fixtures/github-pull-request.json") + Ok(t, err) + json := string(jsBytes) + + pullResponse := strings.Replace(json, + `"mergeable_state": "clean"`, + fmt.Sprintf(`"mergeable_state": "%s"`, "blocked"), + 1, + ) + + combinedStatusJSON := `{ + "state": "success", + "statuses": [%s] + }` + statusJSON := `{ + "url": "https://github.com/gitapi/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e", + "avatar_url": "https://github.com/images/error/other_user_happy.gif", + "id": 2, + "node_id": "MDY6U3RhdHVzMg==", + "state": "%s", + "description": "Testing has completed successfully", + "target_url": "https://ci.example.com/2000/output", + "context": "%s", + "created_at": "2012-08-20T01:19:13Z", + "updated_at": "2012-08-20T01:19:13Z" + }` + + cases := []struct { + description string + statuses []string + expMergeable bool + }{ + { + "apply-failure", + []string{ + fmt.Sprintf(statusJSON, "failure", "atlantis/apply"), + }, + true, + }, + { + "apply-project-failure", + []string{ + fmt.Sprintf(statusJSON, "failure", "atlantis/apply: terraform_cloud_workspace"), + }, + true, + }, + { + "sq-pending+owners-failure", + []string{ + fmt.Sprintf(statusJSON, "failure", "atlantis/plan"), + fmt.Sprintf(statusJSON, "failure", "atlantis/apply"), + }, + false, + }, + } + + for _, c := range cases { + + t.Run("blocked/"+c.description, func(t *testing.T) { + testServer := httptest.NewTLSServer( + http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.RequestURI { + case "/api/v3/repos/owner/repo/commits/2/status?per_page=100": + _, _ = w.Write([]byte( + fmt.Sprintf(combinedStatusJSON, strings.Join(c.statuses, ",")), + )) // nolint: errcheck + return + case "/api/v3/repos/owner/repo/pulls/1": + w.Write([]byte(pullResponse)) // nolint: errcheck + return + default: + t.Errorf("got unexpected request at %q", r.RequestURI) + http.Error(w, "not found", http.StatusNotFound) + return + } + })) + + defer testServer.Close() + + testServerURL, err := url.Parse(testServer.URL) + Ok(t, err) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t), "atlantis") + Ok(t, err) + defer disableSSLVerification()() + + actMergeable, err := client.PullIsMergeable(models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + CloneURL: "", + SanitizedCloneURL: "", + VCSHost: models.VCSHost{ + Type: models.Github, + Hostname: "github.com", + }, + }, models.PullRequest{ + Num: 1, + HeadCommit: "2", + }) + Ok(t, err) + Equals(t, c.expMergeable, actMergeable) + }) + } + +} + func TestGithubClient_MergePullHandlesError(t *testing.T) { cases := []struct { code int @@ -590,7 +697,7 @@ func TestGithubClient_MergePullHandlesError(t *testing.T) { }, } - jsBytes, err := ioutil.ReadFile("fixtures/github-repo.json") + jsBytes, err := os.ReadFile("fixtures/github-repo.json") Ok(t, err) for _, c := range cases { @@ -602,7 +709,7 @@ func TestGithubClient_MergePullHandlesError(t *testing.T) { w.Write(jsBytes) // nolint: errcheck return case "/api/v3/repos/owner/repo/pulls/1/merge": - body, err := ioutil.ReadAll(r.Body) + body, err := io.ReadAll(r.Body) Ok(t, err) exp := "{\"merge_method\":\"merge\"}\n" Equals(t, exp, string(body)) @@ -624,7 +731,7 @@ func TestGithubClient_MergePullHandlesError(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t)) 
+ client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t), "atlantis") Ok(t, err) defer disableSSLVerification()() @@ -700,7 +807,7 @@ func TestGithubClient_MergePullCorrectMethod(t *testing.T) { t.Run(name, func(t *testing.T) { // Modify response. - jsBytes, err := ioutil.ReadFile("fixtures/github-repo.json") + jsBytes, err := os.ReadFile("fixtures/github-repo.json") Ok(t, err) resp := string(jsBytes) resp = strings.Replace(resp, @@ -723,7 +830,7 @@ func TestGithubClient_MergePullCorrectMethod(t *testing.T) { w.Write([]byte(resp)) // nolint: errcheck return case "/api/v3/repos/runatlantis/atlantis/pulls/1/merge": - body, err := ioutil.ReadAll(r.Body) + body, err := io.ReadAll(r.Body) Ok(t, err) defer r.Body.Close() // nolint: errcheck type bodyJSON struct { @@ -747,7 +854,7 @@ func TestGithubClient_MergePullCorrectMethod(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t), "atlantis") Ok(t, err) defer disableSSLVerification()() @@ -775,7 +882,7 @@ func TestGithubClient_MergePullCorrectMethod(t *testing.T) { } func TestGithubClient_MarkdownPullLink(t *testing.T) { - client, err := vcs.NewGithubClient("hostname", &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient("hostname", &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t), "atlantis") Ok(t, err) pull := models.PullRequest{Num: 1} s, _ := client.MarkdownPullLink(pull) @@ -806,7 +913,7 @@ func TestGithubClient_SplitComments(t *testing.T) { switch r.Method + " " + r.RequestURI { case "POST /api/v3/repos/runatlantis/atlantis/issues/1/comments": defer r.Body.Close() // nolint: errcheck - body, err := ioutil.ReadAll(r.Body) + body, err := io.ReadAll(r.Body) if err != nil { t.Errorf("read body error: %v", err) http.Error(w, "server error", http.StatusInternalServerError) @@ -830,7 +937,7 @@ func TestGithubClient_SplitComments(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t), "atlantis") Ok(t, err) defer disableSSLVerification()() pull := models.PullRequest{Num: 1} @@ -888,7 +995,7 @@ func TestGithubClient_Retry404(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopLogger(t), "atlantis") Ok(t, err) defer disableSSLVerification()() repo := models.Repo{ diff --git a/server/events/vcs/github_credentials.go b/server/events/vcs/github_credentials.go index 5c27eadea8..d70376c14d 100644 --- a/server/events/vcs/github_credentials.go +++ b/server/events/vcs/github_credentials.go @@ -7,7 +7,7 @@ import ( "net/url" "strings" - "github.com/bradleyfalzon/ghinstallation" + "github.com/bradleyfalzon/ghinstallation/v2" "github.com/google/go-github/v31/github" "github.com/pkg/errors" ) diff --git 
a/server/events/vcs/github_credentials_test.go b/server/events/vcs/github_credentials_test.go index a85d8d2d95..a6951c1db5 100644 --- a/server/events/vcs/github_credentials_test.go +++ b/server/events/vcs/github_credentials_test.go @@ -15,7 +15,7 @@ func TestGithubClient_GetUser_AppSlug(t *testing.T) { Ok(t, err) anonCreds := &vcs.GithubAnonymousCredentials{} - anonClient, err := vcs.NewGithubClient(testServer, anonCreds, logging.NewNoopLogger(t)) + anonClient, err := vcs.NewGithubClient(testServer, anonCreds, logging.NewNoopLogger(t), "atlantis") Ok(t, err) tempSecrets, err := anonClient.ExchangeCode("good-code") Ok(t, err) @@ -39,7 +39,7 @@ func TestGithubClient_AppAuthentication(t *testing.T) { Ok(t, err) anonCreds := &vcs.GithubAnonymousCredentials{} - anonClient, err := vcs.NewGithubClient(testServer, anonCreds, logging.NewNoopLogger(t)) + anonClient, err := vcs.NewGithubClient(testServer, anonCreds, logging.NewNoopLogger(t), "atlantis") Ok(t, err) tempSecrets, err := anonClient.ExchangeCode("good-code") Ok(t, err) @@ -49,7 +49,7 @@ func TestGithubClient_AppAuthentication(t *testing.T) { Key: []byte(fixtures.GithubPrivateKey), Hostname: testServer, } - _, err = vcs.NewGithubClient(testServer, appCreds, logging.NewNoopLogger(t)) + _, err = vcs.NewGithubClient(testServer, appCreds, logging.NewNoopLogger(t), "atlantis") Ok(t, err) token, err := appCreds.GetToken() diff --git a/server/events/vcs/gitlab_client.go b/server/events/vcs/gitlab_client.go index 07e26df88d..509d0e97df 100644 --- a/server/events/vcs/gitlab_client.go +++ b/server/events/vcs/gitlab_client.go @@ -168,15 +168,17 @@ func (g *GitlabClient) HidePrevCommandComments(repo models.Repo, pullNum int, co } // PullIsApproved returns true if the merge request was approved. -func (g *GitlabClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (bool, error) { +func (g *GitlabClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (approvalStatus models.ApprovalStatus, err error) { approvals, _, err := g.Client.MergeRequests.GetMergeRequestApprovals(repo.FullName, pull.Num) if err != nil { - return false, err + return approvalStatus, err } if approvals.ApprovalsLeft > 0 { - return false, nil + return approvalStatus, nil } - return true, nil + return models.ApprovalStatus{ + IsApproved: true, + }, nil } // PullIsMergeable returns true if the merge request can be merged. @@ -352,6 +354,11 @@ func MustConstraint(constraint string) version.Constraints { return c } +// GetTeamNamesForUser returns the names of the teams or groups that the user belongs to (in the organization the repository belongs to). 
+func (g *GitlabClient) GetTeamNamesForUser(repo models.Repo, user models.User) ([]string, error) { + return nil, nil +} + // DownloadRepoConfigFile returns the `atlantis.yaml` content from the VCS (for hosts that support fetching a single file from a repository) // The first return value indicates whether the repo contains an atlantis.yaml file // If BaseRepo has a repo config file, its content is placed in the second return value diff --git a/server/events/vcs/gitlab_client_test.go b/server/events/vcs/gitlab_client_test.go index 9fdc29e0af..ceb393293a 100644 --- a/server/events/vcs/gitlab_client_test.go +++ b/server/events/vcs/gitlab_client_test.go @@ -2,7 +2,7 @@ package vcs import ( "fmt" - "io/ioutil" + "io" "net/http" "net/http/httptest" "testing" @@ -210,7 +210,7 @@ func TestGitlabClient_UpdateStatus(t *testing.T) { case "/api/v4/projects/runatlantis%2Fatlantis/statuses/sha": gotRequest = true - body, err := ioutil.ReadAll(r.Body) + body, err := io.ReadAll(r.Body) Ok(t, err) exp := fmt.Sprintf(`{"state":"%s","context":"src","target_url":"https://google.com","description":"description"}`, c.expState) Equals(t, exp, string(body)) diff --git a/server/events/vcs/mocks/matchers/models_approvalstatus.go b/server/events/vcs/mocks/matchers/models_approvalstatus.go new file mode 100644 index 0000000000..01b76dd968 --- /dev/null +++ b/server/events/vcs/mocks/matchers/models_approvalstatus.go @@ -0,0 +1,33 @@ +// Code generated by pegomock. DO NOT EDIT. +package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" + + models "github.com/runatlantis/atlantis/server/events/models" +) + +func AnyModelsApprovalStatus() models.ApprovalStatus { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.ApprovalStatus))(nil)).Elem())) + var nullValue models.ApprovalStatus + return nullValue +} + +func EqModelsApprovalStatus(value models.ApprovalStatus) models.ApprovalStatus { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue models.ApprovalStatus + return nullValue +} + +func NotEqModelsApprovalStatus(value models.ApprovalStatus) models.ApprovalStatus { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue models.ApprovalStatus + return nullValue +} + +func ModelsApprovalStatusThat(matcher pegomock.ArgumentMatcher) models.ApprovalStatus { + pegomock.RegisterMatcher(matcher) + var nullValue models.ApprovalStatus + return nullValue +} diff --git a/server/events/vcs/mocks/mock_client.go b/server/events/vcs/mocks/mock_client.go index a39dd61e45..a2bfd40d1c 100644 --- a/server/events/vcs/mocks/mock_client.go +++ b/server/events/vcs/mocks/mock_client.go @@ -25,30 +25,11 @@ func NewMockClient(options ...pegomock.Option) *MockClient { func (mock *MockClient) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockClient) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockClient) GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) { +func (mock *MockClient) CreateComment(_param0 models.Repo, _param1 int, _param2 string, _param3 string) error { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockClient().") } - params := []pegomock.Param{repo, pull} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetModifiedFiles", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockClient) CreateComment(repo models.Repo, pullNum int, comment string, command string) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockClient().") - } - params := []pegomock.Param{repo, pullNum, comment, command} + params := []pegomock.Param{_param0, _param1, _param2, _param3} result := pegomock.GetGenericMockFrom(mock).Invoke("CreateComment", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) var ret0 error if len(result) != 0 { @@ -59,32 +40,40 @@ func (mock *MockClient) CreateComment(repo models.Repo, pullNum int, comment str return ret0 } -func (mock *MockClient) HidePrevCommandComments(repo models.Repo, pullNum int, command string) error { +func (mock *MockClient) DownloadRepoConfigFile(_param0 models.PullRequest) (bool, []byte, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{repo, pullNum, command} - result := pegomock.GetGenericMockFrom(mock).Invoke("HidePrevCommandComments", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error + params := []pegomock.Param{_param0} + result := pegomock.GetGenericMockFrom(mock).Invoke("DownloadRepoConfigFile", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*[]byte)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 bool + var ret1 []byte + var ret2 error if len(result) != 0 { if result[0] != nil { - ret0 = result[0].(error) + ret0 = result[0].(bool) + } + if result[1] != nil { + ret1 = result[1].([]byte) + } + if result[2] != nil { + ret2 = result[2].(error) } } - return ret0 + return ret0, ret1, ret2 } -func (mock *MockClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (bool, error) { +func (mock *MockClient) GetModifiedFiles(_param0 models.Repo, _param1 models.PullRequest) ([]string, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{repo, pull} - result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsApproved", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 bool + params := []pegomock.Param{_param0, _param1} + result := pegomock.GetGenericMockFrom(mock).Invoke("GetModifiedFiles", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 []string var ret1 error if len(result) != 0 { if result[0] != nil { - ret0 = result[0].(bool) + ret0 = result[0].([]string) } if result[1] != nil { ret1 = result[1].(error) @@ -93,17 +82,32 @@ func (mock *MockClient) PullIsApproved(repo models.Repo, pull models.PullRequest return ret0, ret1 } -func (mock *MockClient) PullIsMergeable(repo models.Repo, pull models.PullRequest) (bool, error) { +func (mock *MockClient) HidePrevCommandComments(_param0 models.Repo, _param1 int, _param2 string) error { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockClient().") } - params := []pegomock.Param{repo, pull} - result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsMergeable", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 bool + params := []pegomock.Param{_param0, _param1, _param2} + result := pegomock.GetGenericMockFrom(mock).Invoke("HidePrevCommandComments", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(error) + } + } + return ret0 +} + +func (mock *MockClient) MarkdownPullLink(_param0 models.PullRequest) (string, error) { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockClient().") + } + params := []pegomock.Param{_param0} + result := pegomock.GetGenericMockFrom(mock).Invoke("MarkdownPullLink", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 string var ret1 error if len(result) != 0 { if result[0] != nil { - ret0 = result[0].(bool) + ret0 = result[0].(string) } if result[1] != nil { ret1 = result[1].(error) @@ -112,12 +116,12 @@ func (mock *MockClient) PullIsMergeable(repo models.Repo, pull models.PullReques return ret0, ret1 } -func (mock *MockClient) UpdateStatus(repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error { +func (mock *MockClient) MergePull(_param0 models.PullRequest, _param1 models.PullRequestOptions) error { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{repo, pull, state, src, description, url} - result := pegomock.GetGenericMockFrom(mock).Invoke("UpdateStatus", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) + params := []pegomock.Param{_param0, _param1} + result := pegomock.GetGenericMockFrom(mock).Invoke("MergePull", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) var ret0 error if len(result) != 0 { if result[0] != nil { @@ -127,32 +131,36 @@ func (mock *MockClient) UpdateStatus(repo models.Repo, pull models.PullRequest, return ret0 } -func (mock *MockClient) MergePull(pull models.PullRequest, pullOptions models.PullRequestOptions) error { +func (mock *MockClient) PullIsApproved(_param0 models.Repo, _param1 models.PullRequest) (models.ApprovalStatus, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{pull, pullOptions} - result := pegomock.GetGenericMockFrom(mock).Invoke("MergePull", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error + params := []pegomock.Param{_param0, _param1} + result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsApproved", params, []reflect.Type{reflect.TypeOf((*models.ApprovalStatus)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 models.ApprovalStatus + var ret1 error if len(result) != 0 { if result[0] != nil { - ret0 = result[0].(error) + ret0 = result[0].(models.ApprovalStatus) + } + if result[1] != nil { + ret1 = result[1].(error) } } - return ret0 + return ret0, ret1 } -func (mock *MockClient) MarkdownPullLink(pull models.PullRequest) (string, error) { +func (mock *MockClient) GetTeamNamesForUser(repo models.Repo, user models.User) ([]string, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockClient().") } - params := []pegomock.Param{pull} - result := pegomock.GetGenericMockFrom(mock).Invoke("MarkdownPullLink", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string + params := []pegomock.Param{repo, user} + result := pegomock.GetGenericMockFrom(mock).Invoke("GetTeamNamesForUser", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 []string var ret1 error if len(result) != 0 { if result[0] != nil { - ret0 = result[0].(string) + ret0 = result[0].([]string) } if result[1] != nil { ret1 = result[1].(error) @@ -161,34 +169,30 @@ func (mock *MockClient) MarkdownPullLink(pull models.PullRequest) (string, error return ret0, ret1 } -func (mock *MockClient) DownloadRepoConfigFile(pull models.PullRequest) (bool, []byte, error) { +func (mock *MockClient) PullIsMergeable(_param0 models.Repo, _param1 models.PullRequest) (bool, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{pull} - result := pegomock.GetGenericMockFrom(mock).Invoke("DownloadRepoConfigFile", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*[]byte)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + params := []pegomock.Param{_param0, _param1} + result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsMergeable", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 bool - var ret1 []byte - var ret2 error + var ret1 error if len(result) != 0 { if result[0] != nil { ret0 = result[0].(bool) } if result[1] != nil { - ret1 = result[1].([]byte) - } - if result[2] != nil { - ret2 = result[2].(error) + ret1 = result[1].(error) } } - return ret0, ret1, ret2 + return ret0, ret1 } -func (mock *MockClient) SupportsSingleFileDownload(repo models.Repo) bool { +func (mock *MockClient) SupportsSingleFileDownload(_param0 models.Repo) bool { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{repo} + params := []pegomock.Param{_param0} result := pegomock.GetGenericMockFrom(mock).Invoke("SupportsSingleFileDownload", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()}) var ret0 bool if len(result) != 0 { @@ -199,6 +203,21 @@ func (mock *MockClient) SupportsSingleFileDownload(repo models.Repo) bool { return ret0 } +func (mock *MockClient) UpdateStatus(_param0 models.Repo, _param1 models.PullRequest, _param2 models.CommitStatus, _param3 string, _param4 string, _param5 string) error { + if mock == nil { + panic("mock must not be nil. 
Use myMock := NewMockClient().") + } + params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4, _param5} + result := pegomock.GetGenericMockFrom(mock).Invoke("UpdateStatus", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(error) + } + } + return ret0 +} + func (mock *MockClient) VerifyWasCalledOnce() *VerifierMockClient { return &VerifierMockClient{ mock: mock, @@ -236,39 +255,8 @@ type VerifierMockClient struct { timeout time.Duration } -func (verifier *VerifierMockClient) GetModifiedFiles(repo models.Repo, pull models.PullRequest) *MockClient_GetModifiedFiles_OngoingVerification { - params := []pegomock.Param{repo, pull} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetModifiedFiles", params, verifier.timeout) - return &MockClient_GetModifiedFiles_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockClient_GetModifiedFiles_OngoingVerification struct { - mock *MockClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockClient_GetModifiedFiles_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - repo, pull := c.GetAllCapturedArguments() - return repo[len(repo)-1], pull[len(pull)-1] -} - -func (c *MockClient_GetModifiedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier *VerifierMockClient) CreateComment(repo models.Repo, pullNum int, comment string, command string) *MockClient_CreateComment_OngoingVerification { - params := []pegomock.Param{repo, pullNum, comment, command} +func (verifier *VerifierMockClient) CreateComment(_param0 models.Repo, _param1 int, _param2 string, _param3 string) *MockClient_CreateComment_OngoingVerification { + params := []pegomock.Param{_param0, _param1, _param2, _param3} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CreateComment", params, verifier.timeout) return &MockClient_CreateComment_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -279,8 +267,8 @@ type MockClient_CreateComment_OngoingVerification struct { } func (c *MockClient_CreateComment_OngoingVerification) GetCapturedArguments() (models.Repo, int, string, string) { - repo, pullNum, comment, command := c.GetAllCapturedArguments() - return repo[len(repo)-1], pullNum[len(pullNum)-1], comment[len(comment)-1], command[len(command)-1] + _param0, _param1, _param2, _param3 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1] } func (c *MockClient_CreateComment_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int, _param2 []string, _param3 []string) { @@ -306,58 +294,50 @@ func (c *MockClient_CreateComment_OngoingVerification) GetAllCapturedArguments() return } -func (verifier *VerifierMockClient) 
HidePrevCommandComments(repo models.Repo, pullNum int, command string) *MockClient_HidePrevCommandComments_OngoingVerification { - params := []pegomock.Param{repo, pullNum, command} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "HidePrevCommandComments", params, verifier.timeout) - return &MockClient_HidePrevCommandComments_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +func (verifier *VerifierMockClient) DownloadRepoConfigFile(_param0 models.PullRequest) *MockClient_DownloadRepoConfigFile_OngoingVerification { + params := []pegomock.Param{_param0} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DownloadRepoConfigFile", params, verifier.timeout) + return &MockClient_DownloadRepoConfigFile_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } -type MockClient_HidePrevCommandComments_OngoingVerification struct { +type MockClient_DownloadRepoConfigFile_OngoingVerification struct { mock *MockClient methodInvocations []pegomock.MethodInvocation } -func (c *MockClient_HidePrevCommandComments_OngoingVerification) GetCapturedArguments() (models.Repo, int, string) { - repo, pullNum, command := c.GetAllCapturedArguments() - return repo[len(repo)-1], pullNum[len(pullNum)-1], command[len(command)-1] +func (c *MockClient_DownloadRepoConfigFile_OngoingVerification) GetCapturedArguments() models.PullRequest { + _param0 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1] } -func (c *MockClient_HidePrevCommandComments_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int, _param2 []string) { +func (c *MockClient_DownloadRepoConfigFile_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]int, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(int) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) + _param0[u] = param.(models.PullRequest) } } return } -func (verifier *VerifierMockClient) PullIsApproved(repo models.Repo, pull models.PullRequest) *MockClient_PullIsApproved_OngoingVerification { - params := []pegomock.Param{repo, pull} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsApproved", params, verifier.timeout) - return &MockClient_PullIsApproved_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +func (verifier *VerifierMockClient) GetModifiedFiles(_param0 models.Repo, _param1 models.PullRequest) *MockClient_GetModifiedFiles_OngoingVerification { + params := []pegomock.Param{_param0, _param1} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetModifiedFiles", params, verifier.timeout) + return &MockClient_GetModifiedFiles_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } -type MockClient_PullIsApproved_OngoingVerification struct { +type 
MockClient_GetModifiedFiles_OngoingVerification struct { mock *MockClient methodInvocations []pegomock.MethodInvocation } -func (c *MockClient_PullIsApproved_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - repo, pull := c.GetAllCapturedArguments() - return repo[len(repo)-1], pull[len(pull)-1] +func (c *MockClient_GetModifiedFiles_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { + _param0, _param1 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1] } -func (c *MockClient_PullIsApproved_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { +func (c *MockClient_GetModifiedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { _param0 = make([]models.Repo, len(c.methodInvocations)) @@ -372,86 +352,70 @@ func (c *MockClient_PullIsApproved_OngoingVerification) GetAllCapturedArguments( return } -func (verifier *VerifierMockClient) PullIsMergeable(repo models.Repo, pull models.PullRequest) *MockClient_PullIsMergeable_OngoingVerification { - params := []pegomock.Param{repo, pull} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsMergeable", params, verifier.timeout) - return &MockClient_PullIsMergeable_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +func (verifier *VerifierMockClient) HidePrevCommandComments(_param0 models.Repo, _param1 int, _param2 string) *MockClient_HidePrevCommandComments_OngoingVerification { + params := []pegomock.Param{_param0, _param1, _param2} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "HidePrevCommandComments", params, verifier.timeout) + return &MockClient_HidePrevCommandComments_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } -type MockClient_PullIsMergeable_OngoingVerification struct { +type MockClient_HidePrevCommandComments_OngoingVerification struct { mock *MockClient methodInvocations []pegomock.MethodInvocation } -func (c *MockClient_PullIsMergeable_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - repo, pull := c.GetAllCapturedArguments() - return repo[len(repo)-1], pull[len(pull)-1] +func (c *MockClient_HidePrevCommandComments_OngoingVerification) GetCapturedArguments() (models.Repo, int, string) { + _param0, _param1, _param2 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1] } -func (c *MockClient_PullIsMergeable_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { +func (c *MockClient_HidePrevCommandComments_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int, _param2 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { _param0 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[0] { _param0[u] = param.(models.Repo) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]int, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(int) + } + _param2 = 
make([]string, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(string) } } return } -func (verifier *VerifierMockClient) UpdateStatus(repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) *MockClient_UpdateStatus_OngoingVerification { - params := []pegomock.Param{repo, pull, state, src, description, url} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UpdateStatus", params, verifier.timeout) - return &MockClient_UpdateStatus_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +func (verifier *VerifierMockClient) MarkdownPullLink(_param0 models.PullRequest) *MockClient_MarkdownPullLink_OngoingVerification { + params := []pegomock.Param{_param0} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "MarkdownPullLink", params, verifier.timeout) + return &MockClient_MarkdownPullLink_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } -type MockClient_UpdateStatus_OngoingVerification struct { +type MockClient_MarkdownPullLink_OngoingVerification struct { mock *MockClient methodInvocations []pegomock.MethodInvocation } -func (c *MockClient_UpdateStatus_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, models.CommitStatus, string, string, string) { - repo, pull, state, src, description, url := c.GetAllCapturedArguments() - return repo[len(repo)-1], pull[len(pull)-1], state[len(state)-1], src[len(src)-1], description[len(description)-1], url[len(url)-1] +func (c *MockClient_MarkdownPullLink_OngoingVerification) GetCapturedArguments() models.PullRequest { + _param0 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1] } -func (c *MockClient_UpdateStatus_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []models.CommitStatus, _param3 []string, _param4 []string, _param5 []string) { +func (c *MockClient_MarkdownPullLink_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - _param2 = make([]models.CommitStatus, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(models.CommitStatus) - } - _param3 = make([]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(string) - } - _param4 = make([]string, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(string) - } - _param5 = make([]string, len(c.methodInvocations)) - for u, param := range params[5] { - _param5[u] = param.(string) + _param0[u] = param.(models.PullRequest) } } return } -func (verifier *VerifierMockClient) MergePull(pull models.PullRequest, pullOptions models.PullRequestOptions) *MockClient_MergePull_OngoingVerification { - params := []pegomock.Param{pull, pullOptions} +func (verifier *VerifierMockClient) MergePull(_param0 models.PullRequest, _param1 
models.PullRequestOptions) *MockClient_MergePull_OngoingVerification { + params := []pegomock.Param{_param0, _param1} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "MergePull", params, verifier.timeout) return &MockClient_MergePull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -462,8 +426,8 @@ type MockClient_MergePull_OngoingVerification struct { } func (c *MockClient_MergePull_OngoingVerification) GetCapturedArguments() (models.PullRequest, models.PullRequestOptions) { - pull, pullOptions := c.GetAllCapturedArguments() - return pull[len(pull)-1], pullOptions[len(pullOptions)-1] + _param0, _param1 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1] } func (c *MockClient_MergePull_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest, _param1 []models.PullRequestOptions) { @@ -481,62 +445,81 @@ func (c *MockClient_MergePull_OngoingVerification) GetAllCapturedArguments() (_p return } -func (verifier *VerifierMockClient) MarkdownPullLink(pull models.PullRequest) *MockClient_MarkdownPullLink_OngoingVerification { - params := []pegomock.Param{pull} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "MarkdownPullLink", params, verifier.timeout) - return &MockClient_MarkdownPullLink_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +func (verifier *VerifierMockClient) PullIsApproved(_param0 models.Repo, _param1 models.PullRequest) *MockClient_PullIsApproved_OngoingVerification { + params := []pegomock.Param{_param0, _param1} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsApproved", params, verifier.timeout) + return &MockClient_PullIsApproved_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } -type MockClient_MarkdownPullLink_OngoingVerification struct { +type MockClient_PullIsApproved_OngoingVerification struct { mock *MockClient methodInvocations []pegomock.MethodInvocation } -func (c *MockClient_MarkdownPullLink_OngoingVerification) GetCapturedArguments() models.PullRequest { - pull := c.GetAllCapturedArguments() - return pull[len(pull)-1] +func (c *MockClient_PullIsApproved_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { + _param0, _param1 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1] } -func (c *MockClient_MarkdownPullLink_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest) { +func (c *MockClient_PullIsApproved_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.PullRequest, len(c.methodInvocations)) + _param0 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.PullRequest) + _param0[u] = param.(models.Repo) + } + _param1 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(models.PullRequest) } } return } -func (verifier *VerifierMockClient) DownloadRepoConfigFile(pull models.PullRequest) *MockClient_DownloadRepoConfigFile_OngoingVerification { - params := []pegomock.Param{pull} - 
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DownloadRepoConfigFile", params, verifier.timeout) - return &MockClient_DownloadRepoConfigFile_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +func (verifier *VerifierMockClient) PullIsMergeable(_param0 models.Repo, _param1 models.PullRequest) *MockClient_PullIsMergeable_OngoingVerification { + params := []pegomock.Param{_param0, _param1} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsMergeable", params, verifier.timeout) + return &MockClient_PullIsMergeable_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } -type MockClient_DownloadRepoConfigFile_OngoingVerification struct { +func (verifier *VerifierMockClient) GetTeamNamesForUser(repo models.Repo, user models.User) *MockClient_GetTeamNamesForUser_OngoingVerification { + params := []pegomock.Param{repo, user} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetTeamNamesForUser", params, verifier.timeout) + return &MockClient_GetTeamNamesForUser_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockClient_GetTeamNamesForUser_OngoingVerification struct { mock *MockClient methodInvocations []pegomock.MethodInvocation } -func (c *MockClient_DownloadRepoConfigFile_OngoingVerification) GetCapturedArguments() models.PullRequest { - pull := c.GetAllCapturedArguments() - return pull[len(pull)-1] +type MockClient_PullIsMergeable_OngoingVerification struct { + mock *MockClient + methodInvocations []pegomock.MethodInvocation } -func (c *MockClient_DownloadRepoConfigFile_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest) { +func (c *MockClient_PullIsMergeable_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { + _param0, _param1 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1] +} + +func (c *MockClient_PullIsMergeable_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.PullRequest, len(c.methodInvocations)) + _param0 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.PullRequest) + _param0[u] = param.(models.Repo) + } + _param1 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(models.PullRequest) } } return } -func (verifier *VerifierMockClient) SupportsSingleFileDownload(repo models.Repo) *MockClient_SupportsSingleFileDownload_OngoingVerification { - params := []pegomock.Param{repo} +func (verifier *VerifierMockClient) SupportsSingleFileDownload(_param0 models.Repo) *MockClient_SupportsSingleFileDownload_OngoingVerification { + params := []pegomock.Param{_param0} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SupportsSingleFileDownload", params, verifier.timeout) return &MockClient_SupportsSingleFileDownload_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -547,8 +530,8 @@ type MockClient_SupportsSingleFileDownload_OngoingVerification struct 
{ } func (c *MockClient_SupportsSingleFileDownload_OngoingVerification) GetCapturedArguments() models.Repo { - repo := c.GetAllCapturedArguments() - return repo[len(repo)-1] + _param0 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1] } func (c *MockClient_SupportsSingleFileDownload_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo) { @@ -561,3 +544,50 @@ func (c *MockClient_SupportsSingleFileDownload_OngoingVerification) GetAllCaptur } return } + +func (verifier *VerifierMockClient) UpdateStatus(_param0 models.Repo, _param1 models.PullRequest, _param2 models.CommitStatus, _param3 string, _param4 string, _param5 string) *MockClient_UpdateStatus_OngoingVerification { + params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4, _param5} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UpdateStatus", params, verifier.timeout) + return &MockClient_UpdateStatus_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockClient_UpdateStatus_OngoingVerification struct { + mock *MockClient + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockClient_UpdateStatus_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, models.CommitStatus, string, string, string) { + _param0, _param1, _param2, _param3, _param4, _param5 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1], _param4[len(_param4)-1], _param5[len(_param5)-1] +} + +func (c *MockClient_UpdateStatus_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []models.CommitStatus, _param3 []string, _param4 []string, _param5 []string) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]models.Repo, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(models.Repo) + } + _param1 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(models.PullRequest) + } + _param2 = make([]models.CommitStatus, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.CommitStatus) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) + } + _param4 = make([]string, len(c.methodInvocations)) + for u, param := range params[4] { + _param4[u] = param.(string) + } + _param5 = make([]string, len(c.methodInvocations)) + for u, param := range params[5] { + _param5[u] = param.(string) + } + } + return +} diff --git a/server/events/vcs/not_configured_vcs_client.go b/server/events/vcs/not_configured_vcs_client.go index 3f8556765f..45d4416228 100644 --- a/server/events/vcs/not_configured_vcs_client.go +++ b/server/events/vcs/not_configured_vcs_client.go @@ -35,8 +35,8 @@ func (a *NotConfiguredVCSClient) CreateComment(repo models.Repo, pullNum int, co func (a *NotConfiguredVCSClient) HidePrevCommandComments(repo models.Repo, pullNum int, command string) error { return nil } -func (a *NotConfiguredVCSClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (bool, error) { - return false, a.err() +func (a *NotConfiguredVCSClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { + return models.ApprovalStatus{}, a.err() } func (a 
*NotConfiguredVCSClient) PullIsMergeable(repo models.Repo, pull models.PullRequest) (bool, error) { return false, a.err() @@ -53,6 +53,9 @@ func (a *NotConfiguredVCSClient) MarkdownPullLink(pull models.PullRequest) (stri func (a *NotConfiguredVCSClient) err() error { return fmt.Errorf("atlantis was not configured to support repos from %s", a.Host.String()) } +func (a *NotConfiguredVCSClient) GetTeamNamesForUser(repo models.Repo, user models.User) ([]string, error) { + return nil, a.err() +} func (a *NotConfiguredVCSClient) SupportsSingleFileDownload(repo models.Repo) bool { return false diff --git a/server/events/vcs/proxy.go b/server/events/vcs/proxy.go index c14563a467..2e937b4cf4 100644 --- a/server/events/vcs/proxy.go +++ b/server/events/vcs/proxy.go @@ -64,7 +64,7 @@ func (d *ClientProxy) HidePrevCommandComments(repo models.Repo, pullNum int, com return d.clients[repo.VCSHost.Type].HidePrevCommandComments(repo, pullNum, command) } -func (d *ClientProxy) PullIsApproved(repo models.Repo, pull models.PullRequest) (bool, error) { +func (d *ClientProxy) PullIsApproved(repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { return d.clients[repo.VCSHost.Type].PullIsApproved(repo, pull) } @@ -84,6 +84,10 @@ func (d *ClientProxy) MarkdownPullLink(pull models.PullRequest) (string, error) return d.clients[pull.BaseRepo.VCSHost.Type].MarkdownPullLink(pull) } +func (d *ClientProxy) GetTeamNamesForUser(repo models.Repo, user models.User) ([]string, error) { + return d.clients[repo.VCSHost.Type].GetTeamNamesForUser(repo, user) +} + func (d *ClientProxy) DownloadRepoConfigFile(pull models.PullRequest) (bool, []byte, error) { return d.clients[pull.BaseRepo.VCSHost.Type].DownloadRepoConfigFile(pull) } diff --git a/server/events/vcs/pull_status_fetcher.go b/server/events/vcs/pull_status_fetcher.go new file mode 100644 index 0000000000..2cbb75266e --- /dev/null +++ b/server/events/vcs/pull_status_fetcher.go @@ -0,0 +1,37 @@ +package vcs + +import ( + "github.com/pkg/errors" + "github.com/runatlantis/atlantis/server/events/models" +) + +type PullReqStatusFetcher interface { + FetchPullStatus(repo models.Repo, pull models.PullRequest) (models.PullReqStatus, error) +} + +type pullReqStatusFetcher struct { + client Client +} + +func NewPullReqStatusFetcher(client Client) PullReqStatusFetcher { + return &pullReqStatusFetcher{ + client: client, + } +} + +func (f *pullReqStatusFetcher) FetchPullStatus(repo models.Repo, pull models.PullRequest) (pullStatus models.PullReqStatus, err error) { + approvalStatus, err := f.client.PullIsApproved(repo, pull) + if err != nil { + return pullStatus, errors.Wrapf(err, "fetching pull approval status for repo: %s, and pull number: %d", repo.FullName, pull.Num) + } + + mergeable, err := f.client.PullIsMergeable(repo, pull) + if err != nil { + return pullStatus, errors.Wrapf(err, "fetching mergeability status for repo: %s, and pull number: %d", repo.FullName, pull.Num) + } + + return models.PullReqStatus{ + ApprovalStatus: approvalStatus, + Mergeable: mergeable, + }, err +} diff --git a/server/events/vcs/status.go b/server/events/vcs/status.go new file mode 100644 index 0000000000..929d3930e1 --- /dev/null +++ b/server/events/vcs/status.go @@ -0,0 +1,39 @@ +package vcs + +import ( + "fmt" + "strings" +) + +type StatusTitleMatcher struct { + TitlePrefix string +} + +func (m StatusTitleMatcher) MatchesCommand(title string, command string) bool { + return strings.HasPrefix(title, fmt.Sprintf("%s/%s", m.TitlePrefix, command)) +} + +type StatusTitleBuilder 
struct { + TitlePrefix string +} + +type StatusTitleOptions struct { + ProjectName string +} + +func (b StatusTitleBuilder) Build(command string, options ...StatusTitleOptions) string { + src := fmt.Sprintf("%s/%s", b.TitlePrefix, command) + + var projectName string + for _, opt := range options { + if opt.ProjectName != "" { + projectName = opt.ProjectName + } + } + + if projectName != "" { + src = fmt.Sprintf("%s: %s", src, projectName) + } + + return src +} diff --git a/server/events/vcs/status_test.go b/server/events/vcs/status_test.go new file mode 100644 index 0000000000..144e37480b --- /dev/null +++ b/server/events/vcs/status_test.go @@ -0,0 +1,32 @@ +package vcs_test + +import ( + "testing" + + "github.com/runatlantis/atlantis/server/events/vcs" + "github.com/stretchr/testify/assert" +) + +func TestMatches(t *testing.T) { + + t.Run("in sync with builder", func(t *testing.T) { + titlePrefix := "atlantis-test" + command := "apply" + builder := vcs.StatusTitleBuilder{TitlePrefix: titlePrefix} + matcher := vcs.StatusTitleMatcher{TitlePrefix: titlePrefix} + + title := builder.Build(command) + + assert.True(t, matcher.MatchesCommand(title, command)) + }) + + t.Run("incorrect command", func(t *testing.T) { + titlePrefix := "atlantis-test" + builder := vcs.StatusTitleBuilder{TitlePrefix: titlePrefix} + matcher := vcs.StatusTitleMatcher{TitlePrefix: titlePrefix} + + title := builder.Build("apply") + + assert.False(t, matcher.MatchesCommand(title, "plan")) + }) +} diff --git a/server/events/yaml/parser_validator.go b/server/events/yaml/parser_validator.go index d21b221ccd..00f4592f36 100644 --- a/server/events/yaml/parser_validator.go +++ b/server/events/yaml/parser_validator.go @@ -3,7 +3,6 @@ package yaml import ( "encoding/json" "fmt" - "io/ioutil" "os" "path/filepath" "strings" @@ -46,7 +45,7 @@ func (p *ParserValidator) HasRepoCfg(absRepoDir string) (bool, error) { // If there was no config file, it will return an os.IsNotExist(error). func (p *ParserValidator) ParseRepoCfg(absRepoDir string, globalCfg valid.GlobalCfg, repoID string) (valid.RepoCfg, error) { configFile := p.repoCfgPath(absRepoDir, AtlantisYAMLFilename) - configData, err := ioutil.ReadFile(configFile) // nolint: gosec + configData, err := os.ReadFile(configFile) // nolint: gosec if err != nil { if !os.IsNotExist(err) { @@ -94,7 +93,7 @@ func (p *ParserValidator) ParseRepoCfgData(repoCfgData []byte, globalCfg valid.G // configFile. defaultCfg will be merged into the parsed config. // If there is no file at configFile it will return an error. 
func (p *ParserValidator) ParseGlobalCfg(configFile string, defaultCfg valid.GlobalCfg) (valid.GlobalCfg, error) { - configData, err := ioutil.ReadFile(configFile) // nolint: gosec + configData, err := os.ReadFile(configFile) // nolint: gosec if err != nil { return valid.GlobalCfg{}, errors.Wrapf(err, "unable to read %s file", configFile) } diff --git a/server/events/yaml/parser_validator_test.go b/server/events/yaml/parser_validator_test.go index a722585e07..a58db826ef 100644 --- a/server/events/yaml/parser_validator_test.go +++ b/server/events/yaml/parser_validator_test.go @@ -2,7 +2,6 @@ package yaml_test import ( "fmt" - "io/ioutil" "os" "path/filepath" "regexp" @@ -68,7 +67,7 @@ func TestParseRepoCfg_FileDoesNotExist(t *testing.T) { func TestParseRepoCfg_BadPermissions(t *testing.T) { tmpDir, cleanup := TempDir(t) defer cleanup() - err := ioutil.WriteFile(filepath.Join(tmpDir, "atlantis.yaml"), nil, 0000) + err := os.WriteFile(filepath.Join(tmpDir, "atlantis.yaml"), nil, 0000) Ok(t, err) r := yaml.ParserValidator{} @@ -103,7 +102,7 @@ func TestParseCfgs_InvalidYAML(t *testing.T) { for _, c := range cases { t.Run(c.description, func(t *testing.T) { confPath := filepath.Join(tmpDir, "atlantis.yaml") - err := ioutil.WriteFile(confPath, []byte(c.input), 0600) + err := os.WriteFile(confPath, []byte(c.input), 0600) Ok(t, err) r := yaml.ParserValidator{} _, err = r.ParseRepoCfg(tmpDir, globalCfg, "") @@ -1068,7 +1067,7 @@ workflows: for _, c := range cases { t.Run(c.description, func(t *testing.T) { - err := ioutil.WriteFile(filepath.Join(tmpDir, "atlantis.yaml"), []byte(c.input), 0600) + err := os.WriteFile(filepath.Join(tmpDir, "atlantis.yaml"), []byte(c.input), 0600) Ok(t, err) r := yaml.ParserValidator{} @@ -1096,7 +1095,7 @@ projects: workflow: custom workflows: custom: ~` - err := ioutil.WriteFile(filepath.Join(tmpDir, "atlantis.yaml"), []byte(repoCfg), 0600) + err := os.WriteFile(filepath.Join(tmpDir, "atlantis.yaml"), []byte(repoCfg), 0600) Ok(t, err) r := yaml.ParserValidator{} @@ -1474,7 +1473,7 @@ workflows: tmp, cleanup := TempDir(t) defer cleanup() path := filepath.Join(tmp, "conf.yaml") - Ok(t, ioutil.WriteFile(path, []byte(c.input), 0600)) + Ok(t, os.WriteFile(path, []byte(c.input), 0600)) globalCfgArgs := valid.GlobalCfgArgs{ AllowRepoCfg: false, @@ -1735,8 +1734,8 @@ func TestParseRepoCfg_V2ShellParsing(t *testing.T) { apply: steps: - run: %s`, c.in, c.in) - Ok(t, ioutil.WriteFile(v2Path, []byte("version: 2\n"+cfg), 0600)) - Ok(t, ioutil.WriteFile(v3Path, []byte("version: 3\n"+cfg), 0600)) + Ok(t, os.WriteFile(v2Path, []byte("version: 2\n"+cfg), 0600)) + Ok(t, os.WriteFile(v3Path, []byte("version: 3\n"+cfg), 0600)) p := &yaml.ParserValidator{} globalCfgArgs := valid.GlobalCfgArgs{ diff --git a/server/events/yaml/raw/repo_cfg.go b/server/events/yaml/raw/repo_cfg.go index 3f90803bd5..444175f6da 100644 --- a/server/events/yaml/raw/repo_cfg.go +++ b/server/events/yaml/raw/repo_cfg.go @@ -32,6 +32,7 @@ type RepoCfg struct { ParallelApply *bool `yaml:"parallel_apply,omitempty"` ParallelPlan *bool `yaml:"parallel_plan,omitempty"` DeleteSourceBranchOnMerge *bool `yaml:"delete_source_branch_on_merge,omitempty"` + AllowedRegexpPrefixes []string `yaml:"allowed_regexp_prefixes,omitempty"` } func (r RepoCfg) Validate() error { @@ -87,5 +88,6 @@ func (r RepoCfg) ToValid() valid.RepoCfg { ParallelPlan: parallelPlan, ParallelPolicyCheck: parallelPlan, DeleteSourceBranchOnMerge: r.DeleteSourceBranchOnMerge, + AllowedRegexpPrefixes: r.AllowedRegexpPrefixes, } } diff --git 
a/server/events/yaml/raw/repo_cfg_test.go b/server/events/yaml/raw/repo_cfg_test.go index 430b8c61b9..aa2deb8c95 100644 --- a/server/events/yaml/raw/repo_cfg_test.go +++ b/server/events/yaml/raw/repo_cfg_test.go @@ -144,7 +144,10 @@ workflows: policy_check: steps: [] apply: - steps: []`, + steps: [] +allowed_regexp_prefixes: +- dev/ +- staging/`, exp: raw.RepoCfg{ Version: Int(3), Automerge: Bool(true), @@ -176,6 +179,7 @@ workflows: }, }, }, + AllowedRegexpPrefixes: []string{"dev/", "staging/"}, }, }, } diff --git a/server/events/yaml/valid/global_cfg.go b/server/events/yaml/valid/global_cfg.go index 81068d11ab..6a630a8ca8 100644 --- a/server/events/yaml/valid/global_cfg.go +++ b/server/events/yaml/valid/global_cfg.go @@ -27,7 +27,7 @@ const DeleteSourceBranchOnMergeKey = "delete_source_branch_on_merge" // TODO: Make this more customizable, not everyone wants this rigid workflow // maybe something along the lines of defining overridable/non-overrideable apply // requirements in the config and removing the flag to enable policy checking. -var NonOverrideableApplyReqs []string = []string{PoliciesPassedApplyReq} +var NonOverrideableApplyReqs = []string{PoliciesPassedApplyReq} // GlobalCfg is the final parsed version of server-side repo config. type GlobalCfg struct { diff --git a/server/events/yaml/valid/global_cfg_test.go b/server/events/yaml/valid/global_cfg_test.go index f317a81015..d1570ac2ed 100644 --- a/server/events/yaml/valid/global_cfg_test.go +++ b/server/events/yaml/valid/global_cfg_test.go @@ -2,7 +2,7 @@ package valid_test import ( "fmt" - "io/ioutil" + "os" "path/filepath" "regexp" "testing" @@ -661,7 +661,7 @@ policies: var global valid.GlobalCfg if c.gCfg != "" { path := filepath.Join(tmp, "config.yaml") - Ok(t, ioutil.WriteFile(path, []byte(c.gCfg), 0600)) + Ok(t, os.WriteFile(path, []byte(c.gCfg), 0600)) var err error globalCfgArgs := valid.GlobalCfgArgs{ AllowRepoCfg: false, @@ -832,7 +832,7 @@ repos: var global valid.GlobalCfg if c.gCfg != "" { path := filepath.Join(tmp, "config.yaml") - Ok(t, ioutil.WriteFile(path, []byte(c.gCfg), 0600)) + Ok(t, os.WriteFile(path, []byte(c.gCfg), 0600)) var err error globalCfgArgs := valid.GlobalCfgArgs{ AllowRepoCfg: false, diff --git a/server/events/yaml/valid/repo_cfg.go b/server/events/yaml/valid/repo_cfg.go index b107c06b41..c73b978e35 100644 --- a/server/events/yaml/valid/repo_cfg.go +++ b/server/events/yaml/valid/repo_cfg.go @@ -22,6 +22,7 @@ type RepoCfg struct { ParallelPlan bool ParallelPolicyCheck bool DeleteSourceBranchOnMerge *bool + AllowedRegexpPrefixes []string } func (r RepoCfg) FindProjectsByDirWorkspace(repoRelDir string, workspace string) []Project { @@ -57,17 +58,31 @@ func (r RepoCfg) FindProjectByName(name string) *Project { // FindProjectsByName returns all projects that match with name. 
func (r RepoCfg) FindProjectsByName(name string) []Project { var ps []Project - sanitizedName := "^" + name + "$" - for _, p := range r.Projects { - if p.Name != nil { - if match, _ := regexp.MatchString(sanitizedName, *p.Name); match { - ps = append(ps, p) + if isRegexAllowed(name, r.AllowedRegexpPrefixes) { + sanitizedName := "^" + name + "$" + for _, p := range r.Projects { + if p.Name != nil { + if match, _ := regexp.MatchString(sanitizedName, *p.Name); match { + ps = append(ps, p) + } } } } return ps } +func isRegexAllowed(name string, allowedRegexpPrefixes []string) bool { + if len(allowedRegexpPrefixes) == 0 { + return true + } + for _, allowedRegexPrefix := range allowedRegexpPrefixes { + if strings.HasPrefix(name, allowedRegexPrefix) { + return true + } + } + return false +} + // validateWorkspaceAllowed returns an error if repoCfg defines projects in // repoRelDir but none of them use workspace. We want this to be an error // because if users have gone to the trouble of defining projects in repoRelDir diff --git a/server/events/yaml/valid/repo_cfg_test.go b/server/events/yaml/valid/repo_cfg_test.go new file mode 100644 index 0000000000..28cee4fc9f --- /dev/null +++ b/server/events/yaml/valid/repo_cfg_test.go @@ -0,0 +1,175 @@ +package valid_test + +import ( + "testing" + + validation "github.com/go-ozzo/ozzo-validation" + version "github.com/hashicorp/go-version" + "github.com/runatlantis/atlantis/server/events/yaml/valid" + . "github.com/runatlantis/atlantis/testing" +) + +func TestConfig_FindProjectsByDir(t *testing.T) { + tfVersion, _ := version.NewVersion("v0.11.0") + cases := []struct { + description string + nameRegex string + input valid.RepoCfg + expProjects []valid.Project + }{ + { + description: "Find projects with 'dev' prefix as allowed prefix", + nameRegex: "dev.*", + input: valid.RepoCfg{ + Version: 3, + Projects: []valid.Project{ + { + Dir: ".", + Name: String("dev_terragrunt_myproject"), + Workspace: "myworkspace", + TerraformVersion: tfVersion, + Autoplan: valid.Autoplan{ + WhenModified: []string{"**/*.tf*", "**/terragrunt.hcl"}, + Enabled: false, + }, + ApplyRequirements: []string{"approved"}, + }, + }, + Workflows: map[string]valid.Workflow{ + "myworkflow": { + Name: "myworkflow", + Apply: valid.DefaultApplyStage, + Plan: valid.DefaultPlanStage, + PolicyCheck: valid.DefaultPolicyCheckStage, + }, + }, + AllowedRegexpPrefixes: []string{"dev", "staging"}, + }, + expProjects: []valid.Project{ + { + Dir: ".", + Name: String("dev_terragrunt_myproject"), + Workspace: "myworkspace", + TerraformVersion: tfVersion, + Autoplan: valid.Autoplan{ + WhenModified: []string{"**/*.tf*", "**/terragrunt.hcl"}, + Enabled: false, + }, + ApplyRequirements: []string{"approved"}, + }, + }, + }, + { + description: "Only find projects with allowed prefix", + nameRegex: ".*", + input: valid.RepoCfg{ + Version: 3, + Projects: []valid.Project{ + { + Dir: ".", + Name: String("dev_terragrunt_myproject"), + Workspace: "myworkspace", + TerraformVersion: tfVersion, + Autoplan: valid.Autoplan{ + WhenModified: []string{"**/*.tf*", "**/terragrunt.hcl"}, + Enabled: false, + }, + ApplyRequirements: []string{"approved"}, + }, + { + Dir: ".", + Name: String("staging_terragrunt_myproject"), + Workspace: "myworkspace", + TerraformVersion: tfVersion, + Autoplan: valid.Autoplan{ + WhenModified: []string{"**/*.tf*", "**/terragrunt.hcl"}, + Enabled: false, + }, + ApplyRequirements: []string{"approved"}, + }, + }, + Workflows: map[string]valid.Workflow{ + "myworkflow": { + Name: "myworkflow", + Apply: 
valid.DefaultApplyStage, + Plan: valid.DefaultPlanStage, + PolicyCheck: valid.DefaultPolicyCheckStage, + }, + }, + AllowedRegexpPrefixes: []string{"dev", "staging"}, + }, + expProjects: nil, + }, + { + description: "Find all projects without restrictions of allowed prefix", + nameRegex: ".*", + input: valid.RepoCfg{ + Version: 3, + Projects: []valid.Project{ + { + Dir: ".", + Name: String("dev_terragrunt_myproject"), + Workspace: "myworkspace", + TerraformVersion: tfVersion, + Autoplan: valid.Autoplan{ + WhenModified: []string{"**/*.tf*", "**/terragrunt.hcl"}, + Enabled: false, + }, + ApplyRequirements: []string{"approved"}, + }, + { + Dir: ".", + Name: String("staging_terragrunt_myproject"), + Workspace: "myworkspace", + TerraformVersion: tfVersion, + Autoplan: valid.Autoplan{ + WhenModified: []string{"**/*.tf*", "**/terragrunt.hcl"}, + Enabled: false, + }, + ApplyRequirements: []string{"approved"}, + }, + }, + Workflows: map[string]valid.Workflow{ + "myworkflow": { + Name: "myworkflow", + Apply: valid.DefaultApplyStage, + Plan: valid.DefaultPlanStage, + PolicyCheck: valid.DefaultPolicyCheckStage, + }, + }, + AllowedRegexpPrefixes: nil, + }, + expProjects: []valid.Project{ + { + Dir: ".", + Name: String("dev_terragrunt_myproject"), + Workspace: "myworkspace", + TerraformVersion: tfVersion, + Autoplan: valid.Autoplan{ + WhenModified: []string{"**/*.tf*", "**/terragrunt.hcl"}, + Enabled: false, + }, + ApplyRequirements: []string{"approved"}, + }, + { + Dir: ".", + Name: String("staging_terragrunt_myproject"), + Workspace: "myworkspace", + TerraformVersion: tfVersion, + Autoplan: valid.Autoplan{ + WhenModified: []string{"**/*.tf*", "**/terragrunt.hcl"}, + Enabled: false, + }, + ApplyRequirements: []string{"approved"}, + }, + }, + }, + } + validation.ErrorTag = "yaml" + for _, c := range cases { + t.Run(c.description, func(t *testing.T) { + projects := c.input.FindProjectsByName(c.nameRegex) + Equals(t, c.expProjects, projects) + }) + } +} diff --git a/server/handlers/mocks/matchers/chan_of_string.go b/server/handlers/mocks/matchers/chan_of_string.go new file mode 100644 index 0000000000..e1bfee5726 --- /dev/null +++ b/server/handlers/mocks/matchers/chan_of_string.go @@ -0,0 +1,31 @@ +// Code generated by pegomock. DO NOT EDIT. +package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" +) + +func AnyChanOfString() chan string { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(chan string))(nil)).Elem())) + var nullValue chan string + return nullValue +} + +func EqChanOfString(value chan string) chan string { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue chan string + return nullValue +} + +func NotEqChanOfString(value chan string) chan string { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue chan string + return nullValue +} + +func ChanOfStringThat(matcher pegomock.ArgumentMatcher) chan string { + pegomock.RegisterMatcher(matcher) + var nullValue chan string + return nullValue +} diff --git a/server/handlers/mocks/matchers/http_header.go b/server/handlers/mocks/matchers/http_header.go new file mode 100644 index 0000000000..7531557917 --- /dev/null +++ b/server/handlers/mocks/matchers/http_header.go @@ -0,0 +1,33 @@ +// Code generated by pegomock. DO NOT EDIT. 
+package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" + + http "net/http" +) + +func AnyHttpHeader() http.Header { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(http.Header))(nil)).Elem())) + var nullValue http.Header + return nullValue +} + +func EqHttpHeader(value http.Header) http.Header { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue http.Header + return nullValue +} + +func NotEqHttpHeader(value http.Header) http.Header { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue http.Header + return nullValue +} + +func HttpHeaderThat(matcher pegomock.ArgumentMatcher) http.Header { + pegomock.RegisterMatcher(matcher) + var nullValue http.Header + return nullValue +} diff --git a/server/handlers/mocks/matchers/http_responsewriter.go b/server/handlers/mocks/matchers/http_responsewriter.go new file mode 100644 index 0000000000..1927eca531 --- /dev/null +++ b/server/handlers/mocks/matchers/http_responsewriter.go @@ -0,0 +1,33 @@ +// Code generated by pegomock. DO NOT EDIT. +package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" + + http "net/http" +) + +func AnyHttpResponseWriter() http.ResponseWriter { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(http.ResponseWriter))(nil)).Elem())) + var nullValue http.ResponseWriter + return nullValue +} + +func EqHttpResponseWriter(value http.ResponseWriter) http.ResponseWriter { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue http.ResponseWriter + return nullValue +} + +func NotEqHttpResponseWriter(value http.ResponseWriter) http.ResponseWriter { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue http.ResponseWriter + return nullValue +} + +func HttpResponseWriterThat(matcher pegomock.ArgumentMatcher) http.ResponseWriter { + pegomock.RegisterMatcher(matcher) + var nullValue http.ResponseWriter + return nullValue +} diff --git a/server/handlers/mocks/matchers/map_of_chan_of_string_to_bool.go b/server/handlers/mocks/matchers/map_of_chan_of_string_to_bool.go new file mode 100644 index 0000000000..5cd33d3bac --- /dev/null +++ b/server/handlers/mocks/matchers/map_of_chan_of_string_to_bool.go @@ -0,0 +1,31 @@ +// Code generated by pegomock. DO NOT EDIT. +package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" +) + +func AnyMapOfChanOfStringToBool() map[chan string]bool { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(map[chan string]bool))(nil)).Elem())) + var nullValue map[chan string]bool + return nullValue +} + +func EqMapOfChanOfStringToBool(value map[chan string]bool) map[chan string]bool { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue map[chan string]bool + return nullValue +} + +func NotEqMapOfChanOfStringToBool(value map[chan string]bool) map[chan string]bool { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue map[chan string]bool + return nullValue +} + +func MapOfChanOfStringToBoolThat(matcher pegomock.ArgumentMatcher) map[chan string]bool { + pegomock.RegisterMatcher(matcher) + var nullValue map[chan string]bool + return nullValue +} diff --git a/server/handlers/mocks/matchers/models_commandname.go b/server/handlers/mocks/matchers/models_commandname.go new file mode 100644 index 0000000000..f586b4d216 --- /dev/null +++ b/server/handlers/mocks/matchers/models_commandname.go @@ -0,0 +1,33 @@ +// Code generated by pegomock. DO NOT EDIT. 
+package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" + + models "github.com/runatlantis/atlantis/server/events/models" +) + +func AnyModelsCommandName() models.CommandName { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.CommandName))(nil)).Elem())) + var nullValue models.CommandName + return nullValue +} + +func EqModelsCommandName(value models.CommandName) models.CommandName { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue models.CommandName + return nullValue +} + +func NotEqModelsCommandName(value models.CommandName) models.CommandName { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue models.CommandName + return nullValue +} + +func ModelsCommandNameThat(matcher pegomock.ArgumentMatcher) models.CommandName { + pegomock.RegisterMatcher(matcher) + var nullValue models.CommandName + return nullValue +} diff --git a/server/handlers/mocks/matchers/models_commitstatus.go b/server/handlers/mocks/matchers/models_commitstatus.go new file mode 100644 index 0000000000..1e10ed7823 --- /dev/null +++ b/server/handlers/mocks/matchers/models_commitstatus.go @@ -0,0 +1,33 @@ +// Code generated by pegomock. DO NOT EDIT. +package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" + + models "github.com/runatlantis/atlantis/server/events/models" +) + +func AnyModelsCommitStatus() models.CommitStatus { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.CommitStatus))(nil)).Elem())) + var nullValue models.CommitStatus + return nullValue +} + +func EqModelsCommitStatus(value models.CommitStatus) models.CommitStatus { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue models.CommitStatus + return nullValue +} + +func NotEqModelsCommitStatus(value models.CommitStatus) models.CommitStatus { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue models.CommitStatus + return nullValue +} + +func ModelsCommitStatusThat(matcher pegomock.ArgumentMatcher) models.CommitStatus { + pegomock.RegisterMatcher(matcher) + var nullValue models.CommitStatus + return nullValue +} diff --git a/server/handlers/mocks/matchers/models_projectcommandcontext.go b/server/handlers/mocks/matchers/models_projectcommandcontext.go new file mode 100644 index 0000000000..535f8b9671 --- /dev/null +++ b/server/handlers/mocks/matchers/models_projectcommandcontext.go @@ -0,0 +1,33 @@ +// Code generated by pegomock. DO NOT EDIT. 
+package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" + + models "github.com/runatlantis/atlantis/server/events/models" +) + +func AnyModelsProjectCommandContext() models.ProjectCommandContext { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.ProjectCommandContext))(nil)).Elem())) + var nullValue models.ProjectCommandContext + return nullValue +} + +func EqModelsProjectCommandContext(value models.ProjectCommandContext) models.ProjectCommandContext { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue models.ProjectCommandContext + return nullValue +} + +func NotEqModelsProjectCommandContext(value models.ProjectCommandContext) models.ProjectCommandContext { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue models.ProjectCommandContext + return nullValue +} + +func ModelsProjectCommandContextThat(matcher pegomock.ArgumentMatcher) models.ProjectCommandContext { + pegomock.RegisterMatcher(matcher) + var nullValue models.ProjectCommandContext + return nullValue +} diff --git a/server/handlers/mocks/matchers/ptr_to_http_request.go b/server/handlers/mocks/matchers/ptr_to_http_request.go new file mode 100644 index 0000000000..dfbfc18674 --- /dev/null +++ b/server/handlers/mocks/matchers/ptr_to_http_request.go @@ -0,0 +1,33 @@ +// Code generated by pegomock. DO NOT EDIT. +package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" + + http "net/http" +) + +func AnyPtrToHttpRequest() *http.Request { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*http.Request))(nil)).Elem())) + var nullValue *http.Request + return nullValue +} + +func EqPtrToHttpRequest(value *http.Request) *http.Request { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue *http.Request + return nullValue +} + +func NotEqPtrToHttpRequest(value *http.Request) *http.Request { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue *http.Request + return nullValue +} + +func PtrToHttpRequestThat(matcher pegomock.ArgumentMatcher) *http.Request { + pegomock.RegisterMatcher(matcher) + var nullValue *http.Request + return nullValue +} diff --git a/server/handlers/mocks/matchers/slice_of_byte.go b/server/handlers/mocks/matchers/slice_of_byte.go new file mode 100644 index 0000000000..9515313456 --- /dev/null +++ b/server/handlers/mocks/matchers/slice_of_byte.go @@ -0,0 +1,31 @@ +// Code generated by pegomock. DO NOT EDIT. +package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" +) + +func AnySliceOfByte() []byte { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]byte))(nil)).Elem())) + var nullValue []byte + return nullValue +} + +func EqSliceOfByte(value []byte) []byte { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue []byte + return nullValue +} + +func NotEqSliceOfByte(value []byte) []byte { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue []byte + return nullValue +} + +func SliceOfByteThat(matcher pegomock.ArgumentMatcher) []byte { + pegomock.RegisterMatcher(matcher) + var nullValue []byte + return nullValue +} diff --git a/server/handlers/mocks/matchers/slice_of_string.go b/server/handlers/mocks/matchers/slice_of_string.go new file mode 100644 index 0000000000..f9281819dd --- /dev/null +++ b/server/handlers/mocks/matchers/slice_of_string.go @@ -0,0 +1,31 @@ +// Code generated by pegomock. DO NOT EDIT. 
+package matchers + +import ( + "github.com/petergtz/pegomock" + "reflect" +) + +func AnySliceOfString() []string { + pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]string))(nil)).Elem())) + var nullValue []string + return nullValue +} + +func EqSliceOfString(value []string) []string { + pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) + var nullValue []string + return nullValue +} + +func NotEqSliceOfString(value []string) []string { + pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) + var nullValue []string + return nullValue +} + +func SliceOfStringThat(matcher pegomock.ArgumentMatcher) []string { + pegomock.RegisterMatcher(matcher) + var nullValue []string + return nullValue +} diff --git a/server/handlers/mocks/mock_project_command_output_handler.go b/server/handlers/mocks/mock_project_command_output_handler.go new file mode 100644 index 0000000000..f119e70290 --- /dev/null +++ b/server/handlers/mocks/mock_project_command_output_handler.go @@ -0,0 +1,325 @@ +// Code generated by pegomock. DO NOT EDIT. +// Source: github.com/runatlantis/atlantis/server/handlers (interfaces: ProjectCommandOutputHandler) + +package mocks + +import ( + pegomock "github.com/petergtz/pegomock" + models "github.com/runatlantis/atlantis/server/events/models" + "reflect" + "time" +) + +type MockProjectCommandOutputHandler struct { + fail func(message string, callerSkip ...int) +} + +func NewMockProjectCommandOutputHandler(options ...pegomock.Option) *MockProjectCommandOutputHandler { + mock := &MockProjectCommandOutputHandler{} + for _, option := range options { + option.Apply(mock) + } + return mock +} + +func (mock *MockProjectCommandOutputHandler) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } +func (mock *MockProjectCommandOutputHandler) FailHandler() pegomock.FailHandler { return mock.fail } + +func (mock *MockProjectCommandOutputHandler) CleanUp(_param0 string) { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockProjectCommandOutputHandler().") + } + params := []pegomock.Param{_param0} + pegomock.GetGenericMockFrom(mock).Invoke("CleanUp", params, []reflect.Type{}) +} + +func (mock *MockProjectCommandOutputHandler) Clear(_param0 models.ProjectCommandContext) { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockProjectCommandOutputHandler().") + } + params := []pegomock.Param{_param0} + pegomock.GetGenericMockFrom(mock).Invoke("Clear", params, []reflect.Type{}) +} + +func (mock *MockProjectCommandOutputHandler) Deregister(_param0 string, _param1 chan string) { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockProjectCommandOutputHandler().") + } + params := []pegomock.Param{_param0, _param1} + pegomock.GetGenericMockFrom(mock).Invoke("Deregister", params, []reflect.Type{}) +} + +func (mock *MockProjectCommandOutputHandler) Handle() { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockProjectCommandOutputHandler().") + } + params := []pegomock.Param{} + pegomock.GetGenericMockFrom(mock).Invoke("Handle", params, []reflect.Type{}) +} + +func (mock *MockProjectCommandOutputHandler) Register(_param0 string, _param1 chan string) { + if mock == nil { + panic("mock must not be nil. 
Use myMock := NewMockProjectCommandOutputHandler().") + } + params := []pegomock.Param{_param0, _param1} + pegomock.GetGenericMockFrom(mock).Invoke("Register", params, []reflect.Type{}) +} + +func (mock *MockProjectCommandOutputHandler) Send(_param0 models.ProjectCommandContext, _param1 string) { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockProjectCommandOutputHandler().") + } + params := []pegomock.Param{_param0, _param1} + pegomock.GetGenericMockFrom(mock).Invoke("Send", params, []reflect.Type{}) +} + +func (mock *MockProjectCommandOutputHandler) SetJobURLWithStatus(_param0 models.ProjectCommandContext, _param1 models.CommandName, _param2 models.CommitStatus) error { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockProjectCommandOutputHandler().") + } + params := []pegomock.Param{_param0, _param1, _param2} + result := pegomock.GetGenericMockFrom(mock).Invoke("SetJobURLWithStatus", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(error) + } + } + return ret0 +} + +func (mock *MockProjectCommandOutputHandler) VerifyWasCalledOnce() *VerifierMockProjectCommandOutputHandler { + return &VerifierMockProjectCommandOutputHandler{ + mock: mock, + invocationCountMatcher: pegomock.Times(1), + } +} + +func (mock *MockProjectCommandOutputHandler) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockProjectCommandOutputHandler { + return &VerifierMockProjectCommandOutputHandler{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + } +} + +func (mock *MockProjectCommandOutputHandler) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockProjectCommandOutputHandler { + return &VerifierMockProjectCommandOutputHandler{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + inOrderContext: inOrderContext, + } +} + +func (mock *MockProjectCommandOutputHandler) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockProjectCommandOutputHandler { + return &VerifierMockProjectCommandOutputHandler{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + timeout: timeout, + } +} + +type VerifierMockProjectCommandOutputHandler struct { + mock *MockProjectCommandOutputHandler + invocationCountMatcher pegomock.InvocationCountMatcher + inOrderContext *pegomock.InOrderContext + timeout time.Duration +} + +func (verifier *VerifierMockProjectCommandOutputHandler) CleanUp(_param0 string) *MockProjectCommandOutputHandler_CleanUp_OngoingVerification { + params := []pegomock.Param{_param0} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CleanUp", params, verifier.timeout) + return &MockProjectCommandOutputHandler_CleanUp_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockProjectCommandOutputHandler_CleanUp_OngoingVerification struct { + mock *MockProjectCommandOutputHandler + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockProjectCommandOutputHandler_CleanUp_OngoingVerification) GetCapturedArguments() string { + _param0 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1] +} + +func (c *MockProjectCommandOutputHandler_CleanUp_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { + params := 
pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]string, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(string) + } + } + return +} + +func (verifier *VerifierMockProjectCommandOutputHandler) Clear(_param0 models.ProjectCommandContext) *MockProjectCommandOutputHandler_Clear_OngoingVerification { + params := []pegomock.Param{_param0} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clear", params, verifier.timeout) + return &MockProjectCommandOutputHandler_Clear_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockProjectCommandOutputHandler_Clear_OngoingVerification struct { + mock *MockProjectCommandOutputHandler + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockProjectCommandOutputHandler_Clear_OngoingVerification) GetCapturedArguments() models.ProjectCommandContext { + _param0 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1] +} + +func (c *MockProjectCommandOutputHandler_Clear_OngoingVerification) GetAllCapturedArguments() (_param0 []models.ProjectCommandContext) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]models.ProjectCommandContext, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(models.ProjectCommandContext) + } + } + return +} + +func (verifier *VerifierMockProjectCommandOutputHandler) Deregister(_param0 string, _param1 chan string) *MockProjectCommandOutputHandler_Deregister_OngoingVerification { + params := []pegomock.Param{_param0, _param1} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Deregister", params, verifier.timeout) + return &MockProjectCommandOutputHandler_Deregister_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockProjectCommandOutputHandler_Deregister_OngoingVerification struct { + mock *MockProjectCommandOutputHandler + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockProjectCommandOutputHandler_Deregister_OngoingVerification) GetCapturedArguments() (string, chan string) { + _param0, _param1 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1] +} + +func (c *MockProjectCommandOutputHandler_Deregister_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []chan string) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]string, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(string) + } + _param1 = make([]chan string, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(chan string) + } + } + return +} + +func (verifier *VerifierMockProjectCommandOutputHandler) Handle() *MockProjectCommandOutputHandler_Handle_OngoingVerification { + params := []pegomock.Param{} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Handle", params, verifier.timeout) + return &MockProjectCommandOutputHandler_Handle_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockProjectCommandOutputHandler_Handle_OngoingVerification struct { + mock 
*MockProjectCommandOutputHandler + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockProjectCommandOutputHandler_Handle_OngoingVerification) GetCapturedArguments() { +} + +func (c *MockProjectCommandOutputHandler_Handle_OngoingVerification) GetAllCapturedArguments() { +} + +func (verifier *VerifierMockProjectCommandOutputHandler) Register(_param0 string, _param1 chan string) *MockProjectCommandOutputHandler_Register_OngoingVerification { + params := []pegomock.Param{_param0, _param1} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Register", params, verifier.timeout) + return &MockProjectCommandOutputHandler_Register_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockProjectCommandOutputHandler_Register_OngoingVerification struct { + mock *MockProjectCommandOutputHandler + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockProjectCommandOutputHandler_Register_OngoingVerification) GetCapturedArguments() (string, chan string) { + _param0, _param1 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1] +} + +func (c *MockProjectCommandOutputHandler_Register_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []chan string) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]string, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(string) + } + _param1 = make([]chan string, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(chan string) + } + } + return +} + +func (verifier *VerifierMockProjectCommandOutputHandler) Send(_param0 models.ProjectCommandContext, _param1 string) *MockProjectCommandOutputHandler_Send_OngoingVerification { + params := []pegomock.Param{_param0, _param1} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Send", params, verifier.timeout) + return &MockProjectCommandOutputHandler_Send_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockProjectCommandOutputHandler_Send_OngoingVerification struct { + mock *MockProjectCommandOutputHandler + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockProjectCommandOutputHandler_Send_OngoingVerification) GetCapturedArguments() (models.ProjectCommandContext, string) { + _param0, _param1 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1] +} + +func (c *MockProjectCommandOutputHandler_Send_OngoingVerification) GetAllCapturedArguments() (_param0 []models.ProjectCommandContext, _param1 []string) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]models.ProjectCommandContext, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(models.ProjectCommandContext) + } + _param1 = make([]string, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(string) + } + } + return +} + +func (verifier *VerifierMockProjectCommandOutputHandler) SetJobURLWithStatus(_param0 models.ProjectCommandContext, _param1 models.CommandName, _param2 models.CommitStatus) *MockProjectCommandOutputHandler_SetJobURLWithStatus_OngoingVerification { + params := []pegomock.Param{_param0, _param1, _param2} + 
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SetJobURLWithStatus", params, verifier.timeout) + return &MockProjectCommandOutputHandler_SetJobURLWithStatus_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockProjectCommandOutputHandler_SetJobURLWithStatus_OngoingVerification struct { + mock *MockProjectCommandOutputHandler + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockProjectCommandOutputHandler_SetJobURLWithStatus_OngoingVerification) GetCapturedArguments() (models.ProjectCommandContext, models.CommandName, models.CommitStatus) { + _param0, _param1, _param2 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1] +} + +func (c *MockProjectCommandOutputHandler_SetJobURLWithStatus_OngoingVerification) GetAllCapturedArguments() (_param0 []models.ProjectCommandContext, _param1 []models.CommandName, _param2 []models.CommitStatus) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]models.ProjectCommandContext, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(models.ProjectCommandContext) + } + _param1 = make([]models.CommandName, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(models.CommandName) + } + _param2 = make([]models.CommitStatus, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.CommitStatus) + } + } + return +} diff --git a/server/handlers/mocks/mock_project_job_url_generator.go b/server/handlers/mocks/mock_project_job_url_generator.go new file mode 100644 index 0000000000..7386a383cc --- /dev/null +++ b/server/handlers/mocks/mock_project_job_url_generator.go @@ -0,0 +1,109 @@ +// Code generated by pegomock. DO NOT EDIT. +// Source: github.com/runatlantis/atlantis/server/handlers (interfaces: ProjectJobURLGenerator) + +package mocks + +import ( + pegomock "github.com/petergtz/pegomock" + models "github.com/runatlantis/atlantis/server/events/models" + "reflect" + "time" +) + +type MockProjectJobURLGenerator struct { + fail func(message string, callerSkip ...int) +} + +func NewMockProjectJobURLGenerator(options ...pegomock.Option) *MockProjectJobURLGenerator { + mock := &MockProjectJobURLGenerator{} + for _, option := range options { + option.Apply(mock) + } + return mock +} + +func (mock *MockProjectJobURLGenerator) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } +func (mock *MockProjectJobURLGenerator) FailHandler() pegomock.FailHandler { return mock.fail } + +func (mock *MockProjectJobURLGenerator) GenerateProjectJobURL(p models.ProjectCommandContext) (string, error) { + if mock == nil { + panic("mock must not be nil. 
Use myMock := NewMockProjectJobURLGenerator().") + } + params := []pegomock.Param{p} + result := pegomock.GetGenericMockFrom(mock).Invoke("GenerateProjectJobURL", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 string + var ret1 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(string) + } + if result[1] != nil { + ret1 = result[1].(error) + } + } + return ret0, ret1 +} + +func (mock *MockProjectJobURLGenerator) VerifyWasCalledOnce() *VerifierMockProjectJobURLGenerator { + return &VerifierMockProjectJobURLGenerator{ + mock: mock, + invocationCountMatcher: pegomock.Times(1), + } +} + +func (mock *MockProjectJobURLGenerator) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockProjectJobURLGenerator { + return &VerifierMockProjectJobURLGenerator{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + } +} + +func (mock *MockProjectJobURLGenerator) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockProjectJobURLGenerator { + return &VerifierMockProjectJobURLGenerator{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + inOrderContext: inOrderContext, + } +} + +func (mock *MockProjectJobURLGenerator) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockProjectJobURLGenerator { + return &VerifierMockProjectJobURLGenerator{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + timeout: timeout, + } +} + +type VerifierMockProjectJobURLGenerator struct { + mock *MockProjectJobURLGenerator + invocationCountMatcher pegomock.InvocationCountMatcher + inOrderContext *pegomock.InOrderContext + timeout time.Duration +} + +func (verifier *VerifierMockProjectJobURLGenerator) GenerateProjectJobURL(p models.ProjectCommandContext) *MockProjectJobURLGenerator_GenerateProjectJobURL_OngoingVerification { + params := []pegomock.Param{p} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GenerateProjectJobURL", params, verifier.timeout) + return &MockProjectJobURLGenerator_GenerateProjectJobURL_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockProjectJobURLGenerator_GenerateProjectJobURL_OngoingVerification struct { + mock *MockProjectJobURLGenerator + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockProjectJobURLGenerator_GenerateProjectJobURL_OngoingVerification) GetCapturedArguments() models.ProjectCommandContext { + p := c.GetAllCapturedArguments() + return p[len(p)-1] +} + +func (c *MockProjectJobURLGenerator_GenerateProjectJobURL_OngoingVerification) GetAllCapturedArguments() (_param0 []models.ProjectCommandContext) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]models.ProjectCommandContext, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(models.ProjectCommandContext) + } + } + return +} diff --git a/server/handlers/mocks/mock_project_status_updater.go b/server/handlers/mocks/mock_project_status_updater.go new file mode 100644 index 0000000000..dd5a42d40a --- /dev/null +++ b/server/handlers/mocks/mock_project_status_updater.go @@ -0,0 +1,117 @@ +// Code generated by pegomock. DO NOT EDIT. 
+// Source: github.com/runatlantis/atlantis/server/handlers (interfaces: ProjectStatusUpdater) + +package mocks + +import ( + pegomock "github.com/petergtz/pegomock" + models "github.com/runatlantis/atlantis/server/events/models" + "reflect" + "time" +) + +type MockProjectStatusUpdater struct { + fail func(message string, callerSkip ...int) +} + +func NewMockProjectStatusUpdater(options ...pegomock.Option) *MockProjectStatusUpdater { + mock := &MockProjectStatusUpdater{} + for _, option := range options { + option.Apply(mock) + } + return mock +} + +func (mock *MockProjectStatusUpdater) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } +func (mock *MockProjectStatusUpdater) FailHandler() pegomock.FailHandler { return mock.fail } + +func (mock *MockProjectStatusUpdater) UpdateProject(ctx models.ProjectCommandContext, cmdName models.CommandName, status models.CommitStatus, url string) error { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockProjectStatusUpdater().") + } + params := []pegomock.Param{ctx, cmdName, status, url} + result := pegomock.GetGenericMockFrom(mock).Invoke("UpdateProject", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(error) + } + } + return ret0 +} + +func (mock *MockProjectStatusUpdater) VerifyWasCalledOnce() *VerifierMockProjectStatusUpdater { + return &VerifierMockProjectStatusUpdater{ + mock: mock, + invocationCountMatcher: pegomock.Times(1), + } +} + +func (mock *MockProjectStatusUpdater) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockProjectStatusUpdater { + return &VerifierMockProjectStatusUpdater{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + } +} + +func (mock *MockProjectStatusUpdater) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockProjectStatusUpdater { + return &VerifierMockProjectStatusUpdater{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + inOrderContext: inOrderContext, + } +} + +func (mock *MockProjectStatusUpdater) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockProjectStatusUpdater { + return &VerifierMockProjectStatusUpdater{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + timeout: timeout, + } +} + +type VerifierMockProjectStatusUpdater struct { + mock *MockProjectStatusUpdater + invocationCountMatcher pegomock.InvocationCountMatcher + inOrderContext *pegomock.InOrderContext + timeout time.Duration +} + +func (verifier *VerifierMockProjectStatusUpdater) UpdateProject(ctx models.ProjectCommandContext, cmdName models.CommandName, status models.CommitStatus, url string) *MockProjectStatusUpdater_UpdateProject_OngoingVerification { + params := []pegomock.Param{ctx, cmdName, status, url} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UpdateProject", params, verifier.timeout) + return &MockProjectStatusUpdater_UpdateProject_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockProjectStatusUpdater_UpdateProject_OngoingVerification struct { + mock *MockProjectStatusUpdater + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockProjectStatusUpdater_UpdateProject_OngoingVerification) GetCapturedArguments() (models.ProjectCommandContext, 
models.CommandName, models.CommitStatus, string) { + ctx, cmdName, status, url := c.GetAllCapturedArguments() + return ctx[len(ctx)-1], cmdName[len(cmdName)-1], status[len(status)-1], url[len(url)-1] +} + +func (c *MockProjectStatusUpdater_UpdateProject_OngoingVerification) GetAllCapturedArguments() (_param0 []models.ProjectCommandContext, _param1 []models.CommandName, _param2 []models.CommitStatus, _param3 []string) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]models.ProjectCommandContext, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(models.ProjectCommandContext) + } + _param1 = make([]models.CommandName, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(models.CommandName) + } + _param2 = make([]models.CommitStatus, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.CommitStatus) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) + } + } + return +} diff --git a/server/handlers/mocks/mock_resource_cleaner.go b/server/handlers/mocks/mock_resource_cleaner.go new file mode 100644 index 0000000000..430dd2709f --- /dev/null +++ b/server/handlers/mocks/mock_resource_cleaner.go @@ -0,0 +1,97 @@ +// Code generated by pegomock. DO NOT EDIT. +// Source: github.com/runatlantis/atlantis/server/handlers (interfaces: ResourceCleaner) + +package mocks + +import ( + pegomock "github.com/petergtz/pegomock" + "reflect" + "time" +) + +type MockResourceCleaner struct { + fail func(message string, callerSkip ...int) +} + +func NewMockResourceCleaner(options ...pegomock.Option) *MockResourceCleaner { + mock := &MockResourceCleaner{} + for _, option := range options { + option.Apply(mock) + } + return mock +} + +func (mock *MockResourceCleaner) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } +func (mock *MockResourceCleaner) FailHandler() pegomock.FailHandler { return mock.fail } + +func (mock *MockResourceCleaner) CleanUp(_param0 string) { + if mock == nil { + panic("mock must not be nil. 
Use myMock := NewMockResourceCleaner().") + } + params := []pegomock.Param{_param0} + pegomock.GetGenericMockFrom(mock).Invoke("CleanUp", params, []reflect.Type{}) +} + +func (mock *MockResourceCleaner) VerifyWasCalledOnce() *VerifierMockResourceCleaner { + return &VerifierMockResourceCleaner{ + mock: mock, + invocationCountMatcher: pegomock.Times(1), + } +} + +func (mock *MockResourceCleaner) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockResourceCleaner { + return &VerifierMockResourceCleaner{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + } +} + +func (mock *MockResourceCleaner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockResourceCleaner { + return &VerifierMockResourceCleaner{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + inOrderContext: inOrderContext, + } +} + +func (mock *MockResourceCleaner) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockResourceCleaner { + return &VerifierMockResourceCleaner{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + timeout: timeout, + } +} + +type VerifierMockResourceCleaner struct { + mock *MockResourceCleaner + invocationCountMatcher pegomock.InvocationCountMatcher + inOrderContext *pegomock.InOrderContext + timeout time.Duration +} + +func (verifier *VerifierMockResourceCleaner) CleanUp(_param0 string) *MockResourceCleaner_CleanUp_OngoingVerification { + params := []pegomock.Param{_param0} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CleanUp", params, verifier.timeout) + return &MockResourceCleaner_CleanUp_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockResourceCleaner_CleanUp_OngoingVerification struct { + mock *MockResourceCleaner + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockResourceCleaner_CleanUp_OngoingVerification) GetCapturedArguments() string { + _param0 := c.GetAllCapturedArguments() + return _param0[len(_param0)-1] +} + +func (c *MockResourceCleaner_CleanUp_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]string, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(string) + } + } + return +} diff --git a/server/handlers/project_command_output_handler.go b/server/handlers/project_command_output_handler.go new file mode 100644 index 0000000000..b530aa5ef3 --- /dev/null +++ b/server/handlers/project_command_output_handler.go @@ -0,0 +1,232 @@ +package handlers + +import ( + "sync" + + "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" +) + +// AsyncProjectCommandOutputHandler is a handler to transport terraform client +// outputs to the front end. 
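+// Output lines received on projectCmdOutput are buffered per pull request and fanned out to every receiver channel registered for that pull.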
+type AsyncProjectCommandOutputHandler struct { + projectCmdOutput chan *models.ProjectCmdOutputLine + + projectOutputBuffers map[string][]string + projectOutputBuffersLock sync.RWMutex + + receiverBuffers map[string]map[chan string]bool + receiverBuffersLock sync.RWMutex + + projectStatusUpdater ProjectStatusUpdater + projectJobURLGenerator ProjectJobURLGenerator + + logger logging.SimpleLogging +} + +//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_project_job_url_generator.go ProjectJobURLGenerator + +// ProjectJobURLGenerator generates urls to view project's progress. +type ProjectJobURLGenerator interface { + GenerateProjectJobURL(p models.ProjectCommandContext) (string, error) +} + +//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_project_status_updater.go ProjectStatusUpdater + +type ProjectStatusUpdater interface { + // UpdateProject sets the commit status for the project represented by + // ctx. + UpdateProject(ctx models.ProjectCommandContext, cmdName models.CommandName, status models.CommitStatus, url string) error +} + +//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_project_command_output_handler.go ProjectCommandOutputHandler + +type ProjectCommandOutputHandler interface { + // Clear clears the buffer of previous terraform output lines. + Clear(ctx models.ProjectCommandContext) + + // Send will enqueue the msg and wait for Handle() to receive the message. + Send(ctx models.ProjectCommandContext, msg string) + + // Register registers a channel and blocks until it is caught up. Callers that read the channel + // in the same goroutine should call this asynchronously. + Register(projectInfo string, receiver chan string) + + // Deregister removes a channel from successive updates and closes it. + Deregister(projectInfo string, receiver chan string) + + // Handle listens for msgs from the channel. + Handle() + + // SetJobURLWithStatus sets the commit status for the project represented by + // ctx and updates the status with a url to a job.
+ SetJobURLWithStatus(ctx models.ProjectCommandContext, cmdName models.CommandName, status models.CommitStatus) error + + ResourceCleaner +} + +//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_resource_cleaner.go ResourceCleaner + +type ResourceCleaner interface { + CleanUp(pull string) +} + +func NewAsyncProjectCommandOutputHandler( + projectCmdOutput chan *models.ProjectCmdOutputLine, + projectStatusUpdater ProjectStatusUpdater, + projectJobURLGenerator ProjectJobURLGenerator, + logger logging.SimpleLogging, +) ProjectCommandOutputHandler { + return &AsyncProjectCommandOutputHandler{ + projectCmdOutput: projectCmdOutput, + logger: logger, + receiverBuffers: map[string]map[chan string]bool{}, + projectStatusUpdater: projectStatusUpdater, + projectJobURLGenerator: projectJobURLGenerator, + projectOutputBuffers: map[string][]string{}, + } +} + +func (p *AsyncProjectCommandOutputHandler) Send(ctx models.ProjectCommandContext, msg string) { + p.projectCmdOutput <- &models.ProjectCmdOutputLine{ + ProjectInfo: ctx.PullInfo(), + Line: msg, + } +} + +func (p *AsyncProjectCommandOutputHandler) Register(projectInfo string, receiver chan string) { + p.addChan(receiver, projectInfo) +} + +func (p *AsyncProjectCommandOutputHandler) Handle() { + for msg := range p.projectCmdOutput { + if msg.ClearBuffBefore { + p.clearLogLines(msg.ProjectInfo) + } + p.writeLogLine(msg.ProjectInfo, msg.Line) + } +} + +func (p *AsyncProjectCommandOutputHandler) Clear(ctx models.ProjectCommandContext) { + p.projectCmdOutput <- &models.ProjectCmdOutputLine{ + ProjectInfo: ctx.PullInfo(), + Line: models.LogStreamingClearMsg, + ClearBuffBefore: true, + } +} + +func (p *AsyncProjectCommandOutputHandler) SetJobURLWithStatus(ctx models.ProjectCommandContext, cmdName models.CommandName, status models.CommitStatus) error { + url, err := p.projectJobURLGenerator.GenerateProjectJobURL(ctx) + + if err != nil { + return err + } + return p.projectStatusUpdater.UpdateProject(ctx, cmdName, status, url) +} + +func (p *AsyncProjectCommandOutputHandler) clearLogLines(pull string) { + p.projectOutputBuffersLock.Lock() + delete(p.projectOutputBuffers, pull) + p.projectOutputBuffersLock.Unlock() +} + +func (p *AsyncProjectCommandOutputHandler) addChan(ch chan string, pull string) { + p.projectOutputBuffersLock.RLock() + buffer := p.projectOutputBuffers[pull] + p.projectOutputBuffersLock.RUnlock() + + for _, line := range buffer { + ch <- line + } + + // add the channel to our registry after we backfill the contents of the buffer, + // to prevent new messages coming in interleaving with this backfill. + p.receiverBuffersLock.Lock() + if p.receiverBuffers[pull] == nil { + p.receiverBuffers[pull] = map[chan string]bool{} + } + p.receiverBuffers[pull][ch] = true + p.receiverBuffersLock.Unlock() +} + +//Add log line to buffer and send to all current channels +func (p *AsyncProjectCommandOutputHandler) writeLogLine(pull string, line string) { + p.receiverBuffersLock.Lock() + for ch := range p.receiverBuffers[pull] { + select { + case ch <- line: + default: + // Client ws conn could be closed in two ways: + // 1. Client closes the conn gracefully -> the closeHandler() is executed which + // closes the channel and cleans up resources. + // 2. Client does not close the conn and the closeHandler() is not executed -> the + // receiverChan will be blocking for N number of messages (equal to buffer size) + // before we delete the channel and clean up the resources. 
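+ // In either case the send would block, so instead of waiting on the receiver we drop its channel from the registry below.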
+ delete(p.receiverBuffers[pull], ch) + } + } + p.receiverBuffersLock.Unlock() + + // No need to write to projectOutputBuffers if clear msg. + if line == models.LogStreamingClearMsg { + return + } + + p.projectOutputBuffersLock.Lock() + if p.projectOutputBuffers[pull] == nil { + p.projectOutputBuffers[pull] = []string{} + } + p.projectOutputBuffers[pull] = append(p.projectOutputBuffers[pull], line) + p.projectOutputBuffersLock.Unlock() +} + +// Remove channel, so client no longer receives Terraform output. +func (p *AsyncProjectCommandOutputHandler) Deregister(pull string, ch chan string) { + p.logger.Debug("Removing channel for %s", pull) + p.receiverBuffersLock.Lock() + delete(p.receiverBuffers[pull], ch) + p.receiverBuffersLock.Unlock() +} + +func (p *AsyncProjectCommandOutputHandler) GetReceiverBufferForPull(pull string) map[chan string]bool { + return p.receiverBuffers[pull] +} + +func (p *AsyncProjectCommandOutputHandler) GetProjectOutputBuffer(pull string) []string { + return p.projectOutputBuffers[pull] +} + +func (p *AsyncProjectCommandOutputHandler) CleanUp(pull string) { + p.projectOutputBuffersLock.Lock() + delete(p.projectOutputBuffers, pull) + p.projectOutputBuffersLock.Unlock() + + // Only delete the pull record from receiver buffers. + // WS channel will be closed when the user closes the browser tab + // in closeHandler(). + p.receiverBuffersLock.Lock() + delete(p.receiverBuffers, pull) + p.receiverBuffersLock.Unlock() +} + +// NoopProjectOutputHandler is a mock that doesn't do anything. +type NoopProjectOutputHandler struct{} + +func (p *NoopProjectOutputHandler) Send(ctx models.ProjectCommandContext, msg string) { +} + +func (p *NoopProjectOutputHandler) Register(projectInfo string, receiver chan string) {} +func (p *NoopProjectOutputHandler) Deregister(projectInfo string, receiver chan string) {} + +func (p *NoopProjectOutputHandler) Handle() { +} + +func (p *NoopProjectOutputHandler) Clear(ctx models.ProjectCommandContext) { +} + +func (p *NoopProjectOutputHandler) SetJobURLWithStatus(ctx models.ProjectCommandContext, cmdName models.CommandName, status models.CommitStatus) error { + return nil +} + +func (p *NoopProjectOutputHandler) CleanUp(pull string) { +} diff --git a/server/handlers/project_command_output_handler_test.go b/server/handlers/project_command_output_handler_test.go new file mode 100644 index 0000000000..f644881815 --- /dev/null +++ b/server/handlers/project_command_output_handler_test.go @@ -0,0 +1,216 @@ +package handlers_test + +import ( + "errors" + "sync" + "testing" + + "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/handlers" + "github.com/runatlantis/atlantis/server/handlers/mocks" + "github.com/runatlantis/atlantis/server/handlers/mocks/matchers" + "github.com/runatlantis/atlantis/server/logging" + "github.com/stretchr/testify/assert" + + . "github.com/petergtz/pegomock" + .
"github.com/runatlantis/atlantis/testing" +) + +func createTestProjectCmdContext(t *testing.T) models.ProjectCommandContext { + logger := logging.NewNoopLogger(t) + return models.ProjectCommandContext{ + BaseRepo: models.Repo{ + Name: "test-repo", + Owner: "test-org", + }, + HeadRepo: models.Repo{ + Name: "test-repo", + Owner: "test-org", + }, + Pull: models.PullRequest{ + Num: 1, + HeadBranch: "master", + BaseBranch: "master", + Author: "test-user", + }, + User: models.User{ + Username: "test-user", + }, + Log: logger, + Workspace: "myworkspace", + RepoRelDir: "test-dir", + ProjectName: "test-project", + } +} + +func createProjectCommandOutputHandler(t *testing.T) handlers.ProjectCommandOutputHandler { + logger := logging.NewNoopLogger(t) + prjCmdOutputChan := make(chan *models.ProjectCmdOutputLine) + projectStatusUpdater := mocks.NewMockProjectStatusUpdater() + projectJobURLGenerator := mocks.NewMockProjectJobURLGenerator() + prjCmdOutputHandler := handlers.NewAsyncProjectCommandOutputHandler( + prjCmdOutputChan, + projectStatusUpdater, + projectJobURLGenerator, + logger, + ) + + go func() { + prjCmdOutputHandler.Handle() + }() + + return prjCmdOutputHandler +} + +func TestProjectCommandOutputHandler(t *testing.T) { + Msg := "Test Terraform Output" + ctx := createTestProjectCmdContext(t) + + t.Run("receive message from main channel", func(t *testing.T) { + var wg sync.WaitGroup + var expectedMsg string + projectOutputHandler := createProjectCommandOutputHandler(t) + + ch := make(chan string) + + // register channel and backfill from buffer + // Note: We call this synchronously because otherwise + // there could be a race where we are unable to register the channel + // before sending messages due to the way we lock our buffer memory cache + projectOutputHandler.Register(ctx.PullInfo(), ch) + + wg.Add(1) + + // read from channel + go func() { + for msg := range ch { + expectedMsg = msg + wg.Done() + } + }() + + projectOutputHandler.Send(ctx, Msg) + wg.Wait() + close(ch) + + // Wait for the msg to be read. + wg.Wait() + Equals(t, expectedMsg, Msg) + }) + + t.Run("clear buffer", func(t *testing.T) { + var wg sync.WaitGroup + + projectOutputHandler := createProjectCommandOutputHandler(t) + + ch := make(chan string) + + // register channel and backfill from buffer + // Note: We call this synchronously because otherwise + // there could be a race where we are unable to register the channel + // before sending messages due to the way we lock our buffer memory cache + projectOutputHandler.Register(ctx.PullInfo(), ch) + + wg.Add(1) + // read from channel asynchronously + go func() { + for msg := range ch { + // we are done once we receive the clear message. + // prior message doesn't matter for this test. 
+ if msg == models.LogStreamingClearMsg { + wg.Done() + } + } + }() + + // send regular message followed by clear message + projectOutputHandler.Send(ctx, Msg) + projectOutputHandler.Clear(ctx) + wg.Wait() + close(ch) + + dfProjectOutputHandler, ok := projectOutputHandler.(*handlers.AsyncProjectCommandOutputHandler) + assert.True(t, ok) + + assert.Empty(t, dfProjectOutputHandler.GetProjectOutputBuffer(ctx.PullInfo())) + }) + + t.Run("copies buffer to new channels", func(t *testing.T) { + var wg sync.WaitGroup + + projectOutputHandler := createProjectCommandOutputHandler(t) + + // send first message to populated the buffer + projectOutputHandler.Send(ctx, Msg) + + ch := make(chan string) + + receivedMsgs := []string{} + + wg.Add(1) + // read from channel asynchronously + go func() { + for msg := range ch { + receivedMsgs = append(receivedMsgs, msg) + + // we're only expecting two messages here. + if len(receivedMsgs) >= 2 { + wg.Done() + } + } + }() + // register channel and backfill from buffer + // Note: We call this synchronously because otherwise + // there could be a race where we are unable to register the channel + // before sending messages due to the way we lock our buffer memory cache + projectOutputHandler.Register(ctx.PullInfo(), ch) + + projectOutputHandler.Send(ctx, Msg) + wg.Wait() + close(ch) + + expectedMsgs := []string{Msg, Msg} + assert.Equal(t, len(expectedMsgs), len(receivedMsgs)) + for i := range expectedMsgs { + assert.Equal(t, expectedMsgs[i], receivedMsgs[i]) + } + }) + + t.Run("update project status with project jobs url", func(t *testing.T) { + RegisterMockTestingT(t) + logger := logging.NewNoopLogger(t) + prjCmdOutputChan := make(chan *models.ProjectCmdOutputLine) + projectStatusUpdater := mocks.NewMockProjectStatusUpdater() + projectJobURLGenerator := mocks.NewMockProjectJobURLGenerator() + prjCmdOutputHandler := handlers.NewAsyncProjectCommandOutputHandler( + prjCmdOutputChan, + projectStatusUpdater, + projectJobURLGenerator, + logger, + ) + + When(projectJobURLGenerator.GenerateProjectJobURL(matchers.EqModelsProjectCommandContext(ctx))).ThenReturn("url-to-project-jobs", nil) + err := prjCmdOutputHandler.SetJobURLWithStatus(ctx, models.PlanCommand, models.PendingCommitStatus) + Ok(t, err) + + projectStatusUpdater.VerifyWasCalledOnce().UpdateProject(ctx, models.PlanCommand, models.PendingCommitStatus, "url-to-project-jobs") + }) + + t.Run("update project status with project jobs url error", func(t *testing.T) { + RegisterMockTestingT(t) + logger := logging.NewNoopLogger(t) + prjCmdOutputChan := make(chan *models.ProjectCmdOutputLine) + projectStatusUpdater := mocks.NewMockProjectStatusUpdater() + projectJobURLGenerator := mocks.NewMockProjectJobURLGenerator() + prjCmdOutputHandler := handlers.NewAsyncProjectCommandOutputHandler( + prjCmdOutputChan, + projectStatusUpdater, + projectJobURLGenerator, + logger, + ) + + When(projectJobURLGenerator.GenerateProjectJobURL(matchers.EqModelsProjectCommandContext(ctx))).ThenReturn("url-to-project-jobs", errors.New("some error")) + err := prjCmdOutputHandler.SetJobURLWithStatus(ctx, models.PlanCommand, models.PendingCommitStatus) + assert.Error(t, err) + }) +} diff --git a/server/handlers/websocket_handler.go b/server/handlers/websocket_handler.go new file mode 100644 index 0000000000..3b98a5a54c --- /dev/null +++ b/server/handlers/websocket_handler.go @@ -0,0 +1,61 @@ +package handlers + +import ( + "net/http" + + "github.com/gorilla/websocket" + "github.com/runatlantis/atlantis/server/logging" +) + +//go:generate pegomock 
generate -m --use-experimental-model-gen --package mocks -o mocks/mock_websocket_handler.go WebsocketHandler + +type WebsocketHandler interface { + Upgrade(w http.ResponseWriter, r *http.Request, responseHeader http.Header) (WebsocketConnectionWrapper, error) + SetReadHandler(w WebsocketConnectionWrapper) + SetCloseHandler(w WebsocketConnectionWrapper, receiver chan string) +} + +//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_websocket_connection_wrapper.go WebsocketConnectionWrapper + +type WebsocketConnectionWrapper interface { + ReadMessage() (messageType int, p []byte, err error) + WriteMessage(messageType int, data []byte) error + SetCloseHandler(h func(code int, text string) error) +} + +type DefaultWebsocketHandler struct { + handler websocket.Upgrader + Logger logging.SimpleLogging +} + +func NewWebsocketHandler(logger logging.SimpleLogging) WebsocketHandler { + h := websocket.Upgrader{} + h.CheckOrigin = func(r *http.Request) bool { return true } + return &DefaultWebsocketHandler{ + handler: h, + Logger: logger, + } +} + +func (wh *DefaultWebsocketHandler) Upgrade(w http.ResponseWriter, r *http.Request, responseHeader http.Header) (WebsocketConnectionWrapper, error) { + return wh.handler.Upgrade(w, r, responseHeader) +} + +func (wh *DefaultWebsocketHandler) SetReadHandler(w WebsocketConnectionWrapper) { + for { + _, _, err := w.ReadMessage() + if err != nil { + wh.Logger.Warn("Failed to read WS message: %s", err) + return + } + } +} + +func (wh *DefaultWebsocketHandler) SetCloseHandler(w WebsocketConnectionWrapper, receiver chan string) { + w.SetCloseHandler(func(code int, text string) error { + // Close the channnel after websocket connection closed. + // Will gracefully exit the ProjectCommandOutputHandler.Receive() call and cleanup. + close(receiver) + return nil + }) +} diff --git a/server/recovery/recovery.go b/server/recovery/recovery.go index aff293fcf0..1039ef7503 100644 --- a/server/recovery/recovery.go +++ b/server/recovery/recovery.go @@ -22,7 +22,7 @@ package recovery import ( "bytes" "fmt" - "io/ioutil" + "os" "runtime" ) @@ -48,7 +48,7 @@ func Stack(skip int) []byte { // Print this much at least. If we can't find the source, it won't show. fmt.Fprintf(buf, "%s:%d (0x%x)\n", file, line, pc) if file != lastFile { - data, err := ioutil.ReadFile(file) // nolint: gosec + data, err := os.ReadFile(file) // nolint: gosec if err != nil { continue } diff --git a/server/router.go b/server/router.go index 18e1055949..5607294703 100644 --- a/server/router.go +++ b/server/router.go @@ -1,9 +1,12 @@ package server import ( + "fmt" "net/url" "github.com/gorilla/mux" + "github.com/pkg/errors" + "github.com/runatlantis/atlantis/server/events/models" ) // Router can be used to retrieve Atlantis URLs. It acts as an intermediary @@ -15,6 +18,8 @@ type Router struct { // LockViewRouteName is the named route for the lock view that can be Get'd // from the Underlying router. LockViewRouteName string + // ProjectJobsViewRouteName is the named route for the projects active jobs + ProjectJobsViewRouteName string // LockViewRouteIDQueryParam is the query parameter needed to construct the // lock view: underlying.Get(LockViewRouteName).URL(LockViewRouteIDQueryParam, "my id"). LockViewRouteIDQueryParam string @@ -33,3 +38,20 @@ func (r *Router) GenerateLockURL(lockID string) string { // golang likes to double escape the lockURL path when using url.Parse(). 
return r.AtlantisURL.String() + lockURL.String() } + +func (r *Router) GenerateProjectJobURL(ctx models.ProjectCommandContext) (string, error) { + pull := ctx.Pull + projectIdentifier := models.GetProjectIdentifier(ctx.RepoRelDir, ctx.ProjectName) + jobURL, err := r.Underlying.Get(r.ProjectJobsViewRouteName).URL( + "org", pull.BaseRepo.Owner, + "repo", pull.BaseRepo.Name, + "pull", fmt.Sprintf("%d", pull.Num), + "project", projectIdentifier, + "workspace", ctx.Workspace, + ) + if err != nil { + return "", errors.Wrapf(err, "creating job url for %s/%d/%s/%s", pull.BaseRepo.FullName, pull.Num, projectIdentifier, ctx.Workspace) + } + + return r.AtlantisURL.String() + jobURL.String(), nil +} diff --git a/server/router_test.go b/server/router_test.go index 3a79d56404..ccabee44de 100644 --- a/server/router_test.go +++ b/server/router_test.go @@ -6,6 +6,7 @@ import ( "github.com/gorilla/mux" "github.com/runatlantis/atlantis/server" + "github.com/runatlantis/atlantis/server/events/models" . "github.com/runatlantis/atlantis/testing" ) @@ -60,3 +61,57 @@ func TestRouter_GenerateLockURL(t *testing.T) { }) } } + +func setupJobsRouter(t *testing.T) *server.Router { + atlantisURL, err := server.ParseAtlantisURL("http://localhost:4141") + Ok(t, err) + + underlyingRouter := mux.NewRouter() + underlyingRouter.HandleFunc("/jobs/{org}/{repo}/{pull}/{project}/{workspace}", func(_ http.ResponseWriter, _ *http.Request) {}).Methods("GET").Name("project-jobs-detail") + + return &server.Router{ + AtlantisURL: atlantisURL, + Underlying: underlyingRouter, + ProjectJobsViewRouteName: "project-jobs-detail", + } +} + +func TestGenerateProjectJobURL_ShouldGenerateURLWithProjectNameWhenProjectNameSpecified(t *testing.T) { + router := setupJobsRouter(t) + ctx := models.ProjectCommandContext{ + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + Owner: "test-owner", + Name: "test-repo", + }, + Num: 1, + }, + ProjectName: "test-project", + Workspace: "default", + } + expectedURL := "http://localhost:4141/jobs/test-owner/test-repo/1/test-project/default" + gotURL, err := router.GenerateProjectJobURL(ctx) + Ok(t, err) + + Equals(t, expectedURL, gotURL) +} + +func TestGenerateProjectJobURL_ShouldGenerateURLWithDirectoryAndWorkspaceWhenProjectNameNotSpecified(t *testing.T) { + router := setupJobsRouter(t) + ctx := models.ProjectCommandContext{ + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + Owner: "test-owner", + Name: "test-repo", + }, + Num: 1, + }, + RepoRelDir: "ops/terraform/test-root", + Workspace: "default", + } + expectedURL := "http://localhost:4141/jobs/test-owner/test-repo/1/ops-terraform-test-root/default" + gotURL, err := router.GenerateProjectJobURL(ctx) + Ok(t, err) + + Equals(t, expectedURL, gotURL) +} diff --git a/server/server.go b/server/server.go index 5c8324839b..c29af04774 100644 --- a/server/server.go +++ b/server/server.go @@ -20,7 +20,6 @@ import ( "encoding/json" "flag" "fmt" - "io/ioutil" "log" "net/http" "net/url" @@ -35,6 +34,7 @@ import ( "github.com/mitchellh/go-homedir" "github.com/runatlantis/atlantis/server/core/db" "github.com/runatlantis/atlantis/server/events/yaml/valid" + "github.com/runatlantis/atlantis/server/handlers" assetfs "github.com/elazarl/go-bindata-assetfs" "github.com/gorilla/mux" @@ -42,6 +42,7 @@ import ( "github.com/runatlantis/atlantis/server/controllers" events_controllers "github.com/runatlantis/atlantis/server/controllers/events" "github.com/runatlantis/atlantis/server/controllers/templates" + "github.com/runatlantis/atlantis/server/controllers/websocket" 
"github.com/runatlantis/atlantis/server/core/locking" "github.com/runatlantis/atlantis/server/core/runtime" "github.com/runatlantis/atlantis/server/core/runtime/policy" @@ -68,11 +69,11 @@ const ( // route. ex: // mux.Router.Get(LockViewRouteName).URL(LockViewRouteIDQueryParam, "my id") LockViewRouteIDQueryParam = "id" - + // ProjectJobsViewRouteName is the named route in mux.Router for the log stream view. + ProjectJobsViewRouteName = "project-jobs-detail" // binDirName is the name of the directory inside our data dir where // we download binaries. BinDirName = "bin" - // terraformPluginCacheDir is the name of the dir inside our data dir // where we tell terraform to cache plugins and modules. TerraformPluginCacheDirName = "plugin-cache" @@ -93,11 +94,15 @@ type Server struct { GithubAppController *controllers.GithubAppController LocksController *controllers.LocksController StatusController *controllers.StatusController + JobsController *controllers.JobsController IndexTemplate templates.TemplateWriter LockDetailTemplate templates.TemplateWriter + ProjectJobsTemplate templates.TemplateWriter + ProjectJobsErrorTemplate templates.TemplateWriter SSLCertFile string SSLKeyFile string Drainer *events.Drainer + ProjectCmdOutputHandler handlers.ProjectCommandOutputHandler } // Config holds config for server that isn't passed in by the user. @@ -158,7 +163,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { Token: userConfig.GithubToken, } } else if userConfig.GithubAppID != 0 && userConfig.GithubAppKeyFile != "" { - privateKey, err := ioutil.ReadFile(userConfig.GithubAppKeyFile) + privateKey, err := os.ReadFile(userConfig.GithubAppKeyFile) if err != nil { return nil, err } @@ -180,7 +185,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { } var err error - githubClient, err = vcs.NewGithubClient(userConfig.GithubHostname, githubCredentials, logger) + githubClient, err = vcs.NewGithubClient(userConfig.GithubHostname, githubCredentials, logger, userConfig.VCSStatusName) if err != nil { return nil, err } @@ -217,8 +222,9 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { } if userConfig.AzureDevopsUser != "" { supportedVCSHosts = append(supportedVCSHosts, models.AzureDevops) + var err error - azuredevopsClient, err = vcs.NewAzureDevopsClient("dev.azure.com", userConfig.AzureDevopsUser, userConfig.AzureDevopsToken) + azuredevopsClient, err = vcs.NewAzureDevopsClient(userConfig.AzureDevOpsHostname, userConfig.AzureDevopsUser, userConfig.AzureDevopsToken) if err != nil { return nil, err } @@ -272,8 +278,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { return nil, errors.Wrap(err, "initializing webhooks") } vcsClient := vcs.NewClientProxy(githubClient, gitlabClient, bitbucketCloudClient, bitbucketServerClient, azuredevopsClient) - commitStatusUpdater := &events.DefaultCommitStatusUpdater{Client: vcsClient, StatusName: userConfig.VCSStatusName} - + commitStatusUpdater := &events.DefaultCommitStatusUpdater{Client: vcsClient, TitleBuilder: vcs.StatusTitleBuilder{TitlePrefix: userConfig.VCSStatusName}} binDir, err := mkSubDir(userConfig.DataDir, BinDirName) if err != nil { @@ -286,6 +291,36 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { return nil, err } + parsedURL, err := ParseAtlantisURL(userConfig.AtlantisURL) + if err != nil { + return nil, errors.Wrapf(err, + "parsing --%s flag %q", config.AtlantisURLFlag, userConfig.AtlantisURL) + } + + underlyingRouter := mux.NewRouter() 
+ router := &Router{ + AtlantisURL: parsedURL, + LockViewRouteIDQueryParam: LockViewRouteIDQueryParam, + LockViewRouteName: LockViewRouteName, + ProjectJobsViewRouteName: ProjectJobsViewRouteName, + Underlying: underlyingRouter, + } + + var projectCmdOutputHandler handlers.ProjectCommandOutputHandler + // When TFE is enabled log streaming is not necessary. + + if userConfig.TFEToken != "" { + projectCmdOutputHandler = &handlers.NoopProjectOutputHandler{} + } else { + projectCmdOutput := make(chan *models.ProjectCmdOutputLine) + projectCmdOutputHandler = handlers.NewAsyncProjectCommandOutputHandler( + projectCmdOutput, + commitStatusUpdater, + router, + logger, + ) + } + terraformClient, err := terraform.NewClient( logger, binDir, @@ -296,7 +331,8 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { config.DefaultTFVersionFlag, userConfig.TFDownloadURL, &terraform.DefaultDownloader{}, - true) + true, + projectCmdOutputHandler) // The flag.Lookup call is to detect if we're running in a unit test. If we // are, then we don't error out because we don't have/want terraform // installed on our CI system where the unit tests run. @@ -354,11 +390,6 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { DB: boltdb, } - parsedURL, err := ParseAtlantisURL(userConfig.AtlantisURL) - if err != nil { - return nil, errors.Wrapf(err, - "parsing --%s flag %q", config.AtlantisURLFlag, userConfig.AtlantisURL) - } validator := &yaml.ParserValidator{} globalCfg := valid.NewGlobalCfgFromArgs( @@ -381,19 +412,14 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { } } - underlyingRouter := mux.NewRouter() - router := &Router{ - AtlantisURL: parsedURL, - LockViewRouteIDQueryParam: LockViewRouteIDQueryParam, - LockViewRouteName: LockViewRouteName, - Underlying: underlyingRouter, - } pullClosedExecutor := &events.PullClosedExecutor{ - VCSClient: vcsClient, - Locker: lockingClient, - WorkingDir: workingDir, - Logger: logger, - DB: boltdb, + VCSClient: vcsClient, + Locker: lockingClient, + WorkingDir: workingDir, + Logger: logger, + DB: boltdb, + LogStreamResourceCleaner: projectCmdOutputHandler, + PullClosedTemplate: &events.PullClosedEventTemplate{}, } eventParser := &events.EventParser{ GithubUser: userConfig.GithubUser, @@ -464,8 +490,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { } applyRequirementHandler := &events.AggregateApplyRequirements{ - PullApprovedChecker: vcsClient, - WorkingDir: workingDir, + WorkingDir: workingDir, } projectCommandRunner := &events.DefaultProjectCommandRunner{ @@ -485,6 +510,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { PolicyCheckStepRunner: policyCheckRunner, ApplyStepRunner: &runtime.ApplyStepRunner{ TerraformExecutor: terraformClient, + DefaultTFVersion: defaultTfVersion, CommitStatusUpdater: commitStatusUpdater, AsyncTFExec: terraformClient, }, @@ -517,11 +543,16 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { GlobalAutomerge: userConfig.Automerge, } + projectOutputWrapper := &events.ProjectOutputWrapper{ + ProjectCmdOutputHandler: projectCmdOutputHandler, + ProjectCommandRunner: projectCommandRunner, + } + policyCheckCommandRunner := events.NewPolicyCheckCommandRunner( dbUpdater, pullUpdater, commitStatusUpdater, - projectCommandRunner, + projectOutputWrapper, userConfig.ParallelPoolSize, userConfig.SilenceVCSStatusNoProjects, ) @@ -534,7 +565,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { 
workingDir, commitStatusUpdater, projectCommandBuilder, - projectCommandRunner, + projectOutputWrapper, dbUpdater, pullUpdater, policyCheckCommandRunner, @@ -544,13 +575,14 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { boltdb, ) + pullReqStatusFetcher := vcs.NewPullReqStatusFetcher(vcsClient) applyCommandRunner := events.NewApplyCommandRunner( vcsClient, userConfig.DisableApplyAll, applyLockingClient, commitStatusUpdater, projectCommandBuilder, - projectCommandRunner, + projectOutputWrapper, autoMerger, pullUpdater, dbUpdater, @@ -558,12 +590,13 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { userConfig.ParallelPoolSize, userConfig.SilenceNoProjects, userConfig.SilenceVCSStatusNoProjects, + pullReqStatusFetcher, ) approvePoliciesCommandRunner := events.NewApprovePoliciesCommandRunner( commitStatusUpdater, projectCommandBuilder, - projectCommandRunner, + projectOutputWrapper, pullUpdater, dbUpdater, userConfig.SilenceNoProjects, @@ -579,7 +612,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { versionCommandRunner := events.NewVersionCommandRunner( pullUpdater, projectCommandBuilder, - projectCommandRunner, + projectOutputWrapper, userConfig.ParallelPoolSize, userConfig.SilenceNoProjects, ) @@ -592,6 +625,11 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { models.VersionCommand: versionCommandRunner, } + githubTeamAllowlistChecker, err := events.NewTeamAllowlistChecker(userConfig.GithubTeamAllowlist) + if err != nil { + return nil, err + } + commandRunner := &events.DefaultCommandRunner{ VCSClient: vcsClient, GithubPullGetter: githubClient, @@ -609,6 +647,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { Drainer: drainer, PreWorkflowHooksCommandRunner: preWorkflowHooksCommandRunner, PullStatusFetcher: boltdb, + TeamAllowlistChecker: githubTeamAllowlistChecker, } repoAllowlist, err := events.NewRepoAllowlistChecker(userConfig.RepoAllowlist) if err != nil { @@ -627,6 +666,23 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { DB: boltdb, DeleteLockCommand: deleteLockCommand, } + + wsMux := websocket.NewMultiplexor( + logger, + controllers.ProjectInfoKeyGenerator{}, + projectCmdOutputHandler, + ) + + jobsController := &controllers.JobsController{ + AtlantisVersion: config.AtlantisVersion, + AtlantisURL: parsedURL, + Logger: logger, + ProjectJobsTemplate: templates.ProjectJobsTemplate, + ProjectJobsErrorTemplate: templates.ProjectJobsErrorTemplate, + Db: boltdb, + WsMux: wsMux, + } + eventsController := &events_controllers.VCSEventsController{ CommandRunner: commandRunner, PullCleaner: pullClosedExecutor, @@ -653,8 +709,8 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { GithubSetupComplete: githubAppEnabled, GithubHostname: userConfig.GithubHostname, GithubOrg: userConfig.GithubOrg, + GithubStatusName: userConfig.VCSStatusName, } - return &Server{ AtlantisVersion: config.AtlantisVersion, AtlantisURL: parsedURL, @@ -668,12 +724,16 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { VCSEventsController: eventsController, GithubAppController: githubAppController, LocksController: locksController, + JobsController: jobsController, StatusController: statusController, IndexTemplate: templates.IndexTemplate, LockDetailTemplate: templates.LockTemplate, + ProjectJobsTemplate: templates.ProjectJobsTemplate, + ProjectJobsErrorTemplate: templates.ProjectJobsErrorTemplate, SSLKeyFile: 
userConfig.SSLKeyFile, SSLCertFile: userConfig.SSLCertFile, Drainer: drainer, + ProjectCmdOutputHandler: projectCmdOutputHandler, }, nil } @@ -693,6 +753,9 @@ func (s *Server) Start() error { s.Router.HandleFunc("/locks", s.LocksController.DeleteLock).Methods("DELETE").Queries("id", "{id:.*}") s.Router.HandleFunc("/lock", s.LocksController.GetLock).Methods("GET"). Queries(LockViewRouteIDQueryParam, fmt.Sprintf("{%s}", LockViewRouteIDQueryParam)).Name(LockViewRouteName) + s.Router.HandleFunc("/jobs/{org}/{repo}/{pull}/{project}/{workspace}", s.JobsController.GetProjectJobs).Methods("GET").Name(ProjectJobsViewRouteName) + s.Router.HandleFunc("/jobs/{org}/{repo}/{pull}/{project}/{workspace}/ws", s.JobsController.GetProjectJobsWS).Methods("GET") + n := negroni.New(&negroni.Recovery{ Logger: log.New(os.Stdout, "", log.LstdFlags), PrintStack: false, @@ -708,6 +771,10 @@ func (s *Server) Start() error { // Stop on SIGINTs and SIGTERMs. signal.Notify(stop, os.Interrupt, syscall.SIGTERM) + go func() { + s.ProjectCmdOutputHandler.Handle() + }() + server := &http.Server{Addr: fmt.Sprintf(":%d", s.Port), Handler: n} go func() { s.Logger.Info("Atlantis started - listening on port %v", s.Port) @@ -808,7 +875,7 @@ func (s *Server) Index(w http.ResponseWriter, _ *http.Request) { func mkSubDir(parentDir string, subDir string) (string, error) { fullDir := filepath.Join(parentDir, subDir) if err := os.MkdirAll(fullDir, 0700); err != nil { - return "", errors.Wrapf(err, "unable to creare dir %q", fullDir) + return "", errors.Wrapf(err, "unable to create dir %q", fullDir) } return fullDir, nil diff --git a/server/server_test.go b/server/server_test.go index 3b62d35b3c..4ed72785a1 100644 --- a/server/server_test.go +++ b/server/server_test.go @@ -16,10 +16,11 @@ package server_test import ( "bytes" "errors" - "io/ioutil" + "io" "net/http" "net/http/httptest" "net/url" + "os" "testing" "time" @@ -36,7 +37,7 @@ import ( func TestNewServer(t *testing.T) { t.Log("Run through NewServer constructor") - tmpDir, err := ioutil.TempDir("", "") + tmpDir, err := os.MkdirTemp("", "") Ok(t, err) _, err = server.NewServer(server.UserConfig{ DataDir: tmpDir, @@ -48,7 +49,7 @@ func TestNewServer(t *testing.T) { // todo: test what happens if we set different flags. The generated config should be different. 
func TestNewServer_InvalidAtlantisURL(t *testing.T) { - tmpDir, err := ioutil.TempDir("", "") + tmpDir, err := os.MkdirTemp("", "") Ok(t, err) _, err = server.NewServer(server.UserConfig{ DataDir: tmpDir, @@ -138,7 +139,7 @@ func TestHealthz(t *testing.T) { w := httptest.NewRecorder() s.Healthz(w, req) Equals(t, http.StatusOK, w.Result().StatusCode) - body, _ := ioutil.ReadAll(w.Result().Body) + body, _ := io.ReadAll(w.Result().Body) Equals(t, "application/json", w.Result().Header["Content-Type"][0]) Equals(t, `{ diff --git a/server/user_config.go b/server/user_config.go index 4875ad7671..d0d1811aac 100644 --- a/server/user_config.go +++ b/server/user_config.go @@ -17,6 +17,7 @@ type UserConfig struct { AzureDevopsUser string `mapstructure:"azuredevops-user"` AzureDevopsWebhookPassword string `mapstructure:"azuredevops-webhook-password"` AzureDevopsWebhookUser string `mapstructure:"azuredevops-webhook-user"` + AzureDevOpsHostname string `mapstructure:"azuredevops-hostname"` BitbucketBaseURL string `mapstructure:"bitbucket-base-url"` BitbucketToken string `mapstructure:"bitbucket-token"` BitbucketUser string `mapstructure:"bitbucket-user"` @@ -40,6 +41,7 @@ type UserConfig struct { GithubAppKey string `mapstructure:"gh-app-key"` GithubAppKeyFile string `mapstructure:"gh-app-key-file"` GithubAppSlug string `mapstructure:"gh-app-slug"` + GithubTeamAllowlist string `mapstructure:"gh-team-allowlist"` GitlabHostname string `mapstructure:"gitlab-hostname"` GitlabToken string `mapstructure:"gitlab-token"` GitlabUser string `mapstructure:"gitlab-user"` @@ -86,6 +88,9 @@ type UserConfig struct { VCSStatusName string `mapstructure:"vcs-status-name"` DefaultTFVersion string `mapstructure:"default-tf-version"` Webhooks []WebhookConfig `mapstructure:"webhooks"` + WebBasicAuth bool `mapstructure:"web-basic-auth"` + WebUsername string `mapstructure:"web-username"` + WebPassword string `mapstructure:"web-password"` WriteGitCreds bool `mapstructure:"write-git-creds"` } diff --git a/testdrive/testdrive.go b/testdrive/testdrive.go index 0e0356fde0..8ad0d1f86c 100644 --- a/testdrive/testdrive.go +++ b/testdrive/testdrive.go @@ -18,7 +18,6 @@ package testdrive import ( "context" "fmt" - "io/ioutil" "os" "os/exec" "os/signal" @@ -173,11 +172,11 @@ tunnels: proto: http `, ngrokAPIURL, atlantisPort) - ngrokConfigFile, err := ioutil.TempFile("", "") + ngrokConfigFile, err := os.CreateTemp("", "") if err != nil { return errors.Wrap(err, "creating ngrok config file") } - err = ioutil.WriteFile(ngrokConfigFile.Name(), []byte(ngrokConfig), 0600) + err = os.WriteFile(ngrokConfigFile.Name(), []byte(ngrokConfig), 0600) if err != nil { return errors.Wrap(err, "writing ngrok config file") } @@ -211,7 +210,7 @@ tunnels: // Start atlantis server. 
colorstring.Println("=> starting atlantis server") s.Start() - tmpDir, err := ioutil.TempDir("", "") + tmpDir, err := os.MkdirTemp("", "") if err != nil { return errors.Wrap(err, "creating a temporary data directory for Atlantis") } diff --git a/testdrive/utils.go b/testdrive/utils.go index 77645e45b6..39826cb728 100644 --- a/testdrive/utils.go +++ b/testdrive/utils.go @@ -20,7 +20,6 @@ import ( "encoding/json" "fmt" "io" - "io/ioutil" "net/http" "os" "os/exec" @@ -31,17 +30,17 @@ import ( "time" "github.com/pkg/errors" - "golang.org/x/crypto/ssh/terminal" + "golang.org/x/term" ) const hashicorpReleasesURL = "https://releases.hashicorp.com" -const terraformVersion = "0.10.8" +const terraformVersion = "1.1.2" const ngrokDownloadURL = "https://bin.equinox.io/c/4VmDzA7iaHb" const ngrokAPIURL = "localhost:41414" // We hope this isn't used. const atlantisPort = 4141 func readPassword() (string, error) { - password, err := terminal.ReadPassword(int(syscall.Stdin)) // nolint: unconvert + password, err := term.ReadPassword(int(syscall.Stdin)) // nolint: unconvert return string(password), err } @@ -123,7 +122,7 @@ func getTunnelAddr() (string, error) { var t tunnels - body, err := ioutil.ReadAll(response.Body) + body, err := io.ReadAll(response.Body) if err != nil { return "", errors.Wrap(err, "reading ngrok api") } @@ -214,7 +213,7 @@ func execAndWaitForStderr(wg *sync.WaitGroup, stderrMatch *regexp.Regexp, timeou cancel() // We still need to wait for the command to finish. command.Wait() // nolint: errcheck - return cancel, errChan, fmt.Errorf("timeout, logs:\n%s\n", log) // nolint: staticcheck, golint + return cancel, errChan, fmt.Errorf("timeout, logs:\n%s\n", log) // nolint: staticcheck, revive } // Increment the wait group so callers can wait for the command to finish. 
diff --git a/testing/Dockerfile b/testing/Dockerfile
index dad46be829..80b4900e06 100644
--- a/testing/Dockerfile
+++ b/testing/Dockerfile
@@ -1,22 +1,26 @@
-FROM circleci/golang:1.17
+FROM golang:1.17
+
+RUN apt-get update && apt-get install -y unzip
 
 # Install Terraform
-ENV TERRAFORM_VERSION=1.0.6
-RUN curl -LOks https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip && \
-    sudo mkdir -p /usr/local/bin/tf/versions/${TERRAFORM_VERSION} && \
-    sudo unzip terraform_${TERRAFORM_VERSION}_linux_amd64.zip -d /usr/local/bin/tf/versions/${TERRAFORM_VERSION} && \
-    sudo ln -s /usr/local/bin/tf/versions/${TERRAFORM_VERSION}/terraform /usr/local/bin/terraform && \
-    rm terraform_${TERRAFORM_VERSION}_linux_amd64.zip
+ENV TERRAFORM_VERSION=1.1.2
+RUN case $(uname -m) in x86_64|amd64) ARCH="amd64" ;; aarch64|arm64|armv7l) ARCH="arm64" ;; esac && \
+    wget -nv -O terraform.zip https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_${ARCH}.zip && \
+    mkdir -p /usr/local/bin/tf/versions/${TERRAFORM_VERSION} && \
+    unzip terraform.zip -d /usr/local/bin/tf/versions/${TERRAFORM_VERSION} && \
+    ln -s /usr/local/bin/tf/versions/${TERRAFORM_VERSION}/terraform /usr/local/bin/terraform && \
+    rm terraform.zip
 
 # Install conftest
-ENV CONFTEST_VERSION=0.27.0
-RUN curl -LOs https://github.com/open-policy-agent/conftest/releases/download/v${CONFTEST_VERSION}/conftest_${CONFTEST_VERSION}_Linux_x86_64.tar.gz && \
+ENV CONFTEST_VERSION=0.28.3
+RUN case $(uname -m) in x86_64|amd64) ARCH="x86_64" ;; aarch64|arm64|armv7l) ARCH="arm64" ;; esac && \
+    curl -LOs https://github.com/open-policy-agent/conftest/releases/download/v${CONFTEST_VERSION}/conftest_${CONFTEST_VERSION}_Linux_${ARCH}.tar.gz && \
     curl -LOs https://github.com/open-policy-agent/conftest/releases/download/v${CONFTEST_VERSION}/checksums.txt && \
-    sed -n "/conftest_${CONFTEST_VERSION}_Linux_x86_64.tar.gz/p" checksums.txt | sha256sum -c && \
-    sudo mkdir -p /usr/local/bin/cft/versions/${CONFTEST_VERSION} && \
-    sudo tar -C /usr/local/bin/cft/versions/${CONFTEST_VERSION} -xzf conftest_${CONFTEST_VERSION}_Linux_x86_64.tar.gz && \
-    sudo ln -s /usr/local/bin/cft/versions/${CONFTEST_VERSION}/conftest /usr/local/bin/conftest${CONFTEST_VERSION} && \
-    rm conftest_${CONFTEST_VERSION}_Linux_x86_64.tar.gz && \
+    sed -n "/conftest_${CONFTEST_VERSION}_Linux_${ARCH}.tar.gz/p" checksums.txt | sha256sum -c && \
+    mkdir -p /usr/local/bin/cft/versions/${CONFTEST_VERSION} && \
+    tar -C /usr/local/bin/cft/versions/${CONFTEST_VERSION} -xzf conftest_${CONFTEST_VERSION}_Linux_${ARCH}.tar.gz && \
+    ln -s /usr/local/bin/cft/versions/${CONFTEST_VERSION}/conftest /usr/local/bin/conftest${CONFTEST_VERSION} && \
+    rm conftest_${CONFTEST_VERSION}_Linux_${ARCH}.tar.gz && \
     rm checksums.txt
 
 RUN go get golang.org/x/tools/cmd/goimports
diff --git a/testing/http.go b/testing/http.go
index ba19b54084..c6623e10bf 100644
--- a/testing/http.go
+++ b/testing/http.go
@@ -1,7 +1,7 @@
 package testing
 
 import (
-	"io/ioutil"
+	"io"
 	"net/http/httptest"
 	"strings"
 	"testing"
@@ -9,7 +9,7 @@ import (
 
 func ResponseContains(t *testing.T, r *httptest.ResponseRecorder, status int, bodySubstr string) {
 	t.Helper()
-	body, err := ioutil.ReadAll(r.Result().Body)
+	body, err := io.ReadAll(r.Result().Body)
 	Ok(t, err)
 	Assert(t, status == r.Result().StatusCode, "exp %d got %d, body: %s", status, r.Result().StatusCode, string(body))
 	Assert(t, strings.Contains(string(body), bodySubstr), "exp %q to be contained in %q",
bodySubstr, string(body)) diff --git a/testing/temp_files.go b/testing/temp_files.go index 6bab8f03ab..3ee8b062c4 100644 --- a/testing/temp_files.go +++ b/testing/temp_files.go @@ -1,7 +1,6 @@ package testing import ( - "io/ioutil" "os" "path/filepath" "testing" @@ -12,7 +11,7 @@ import ( // dir, cleanup := TempDir() // defer cleanup() func TempDir(t *testing.T) (string, func()) { - tmpDir, err := ioutil.TempDir("", "") + tmpDir, err := os.MkdirTemp("", "") Ok(t, err) return tmpDir, func() { os.RemoveAll(tmpDir) // nolint: errcheck @@ -65,7 +64,7 @@ func dirStructureGo(t *testing.T, parentDir string, structure map[string]interfa dirStructureGo(t, subDir, dirContents) } else if fileContent, ok := val.(string); ok { // If val is a string then key is a file name and val is the file's content - err := ioutil.WriteFile(filepath.Join(parentDir, key), []byte(fileContent), 0600) + err := os.WriteFile(filepath.Join(parentDir, key), []byte(fileContent), 0600) Ok(t, err) } } diff --git a/yarn.lock b/yarn.lock index f32f477d28..49d3d1f92b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -4564,8 +4564,8 @@ pretty-time@^1.0.0: nanoseconds "^1.0.0" prismjs@^1.13.0: - version "1.24.0" - resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.24.0.tgz#0409c30068a6c52c89ef7f1089b3ca4de56be2ac" + version "1.25.0" + resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.25.0.tgz#6f822df1bdad965734b310b315a23315cf999756" private@^0.1.6: version "0.1.8"