diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 227603cbbd..0000000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,30 +0,0 @@ -version: 2 -jobs: - e2e: - docker: - - image: cimg/go:1.22 # If you update this, update it in the Makefile too - environment: - # This version of TF will be downloaded before Atlantis is started. - # We do this instead of setting --default-tf-version because setting - # that flag starts the download asynchronously so we'd have a race - # condition. - # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp - TERRAFORM_VERSION: 1.7.4 - steps: - - checkout - - run: make build-service - # We don't run e2e tests on fork PRs because they don't have access to the secret env vars. - - run: if [ -z "${CIRCLE_PR_REPONAME}" ]; then ./scripts/e2e.sh; fi - -workflows: - version: 2 - branch: - jobs: - - e2e: - context: - - atlantis-e2e-tests - filters: - branches: - # Ignore fork PRs since they don't have access to - # the atlantis-e2e-tests context (and also doc PRs). - ignore: /(pull\/\d+)|(docs\/.*)/ diff --git a/.dockerignore b/.dockerignore index d9647e1977..523596ac26 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,5 +1,6 @@ * !cmd/ +!scripts/download-release.sh !server/ !testdrive/ !main.go diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000000..3f90f6406f --- /dev/null +++ b/.editorconfig @@ -0,0 +1,12 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +trim_trailing_whitespace = true +insert_final_newline = true + +[*.md] +indent_style = space +indent_size = 3 +trim_trailing_whitespace = false diff --git a/.gitattributes b/.gitattributes index 60fad8a123..d56abbf304 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,11 +1,2 @@ # Set the default behavior, in case people don't have core.autocrlf set. -* text=auto - -# Explicitly declare text files you want to always be normalized and converted -# to native line endings on checkout. 
-*.go text -*.json text -*.yml text -*.yaml text -*.sh text -*.tf text +* text=auto eol=lf diff --git a/.github/labeler.yml b/.github/labeler.yml index 7d6cf75daf..6dd6741d81 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -14,7 +14,8 @@ docs: github-actions: - changed-files: - - any-glob-to-any-file: '.github/**' + - any-glob-to-any-file: + - '.github/workflows/*.yml' go: - changed-files: @@ -40,6 +41,10 @@ provider/gitlab: website: - changed-files: - - any-glob-to-any-file: 'runatlantis.io/.vuepress/**/*' + - any-glob-to-any-file: 'runatlantis.io/.vitepress/**/*' - any-glob-to-any-file: 'package.json' - - any-glob-to-any-file: 'pnpm-lock.yaml' + - any-glob-to-any-file: 'package-lock.json' + +blog: +- changed-files: + - any-glob-to-any-file: 'runatlantis.io/blog/**' diff --git a/.github/release.yml b/.github/release.yml index df2619416f..0029e2d496 100644 --- a/.github/release.yml +++ b/.github/release.yml @@ -5,6 +5,7 @@ changelog: - github-actions authors: - octocat + - renovate[bot] categories: - title: Breaking Changes 🛠 labels: @@ -15,9 +16,9 @@ changelog: - Semver-Minor - enhancement - feature - - title: Bug fixes + - title: Bug fixes 🐛 labels: - bug - - title: Other Changes + - title: Other Changes 🔄 labels: - "*" diff --git a/.github/renovate.json5 b/.github/renovate.json5 index e520313b2a..8ca42d1d5d 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -1,108 +1,119 @@ { extends: [ - "config:base", - "schedule:daily", + 'config:best-practices', + ':separateMultipleMajorReleases', + 'schedule:daily', ], - commitMessageSuffix: " in {{packageFile}}", + commitMessageSuffix: ' in {{packageFile}}', dependencyDashboardAutoclose: true, automerge: true, - baseBranches: ["main", "/^release\-.*/"], + baseBranches: [ + 'main', + '/^release-.*/', + ], platformAutomerge: true, - labels: ["dependencies"], + labels: [ + 'dependencies', + ], postUpdateOptions: [ - "gomodTidy", - "gomodUpdateImportPaths", - "pnpmDedupe", + 'gomodTidy', + 'gomodUpdateImportPaths', + 'npmDedupe', ], - // needed so e2e tests do not stomp over each other prHourlyLimit: 1, - lockFileMaintenance: { - enabled: true, - }, + minimumReleaseAge: '5 days', + osvVulnerabilityAlerts: true, vulnerabilityAlerts: { enabled: true, labels: [ - "security", + 'security', ], }, packageRules: [ - // For vuepress { - "matchPackageNames": ["vuepress", "@vuepress/client", "@vuepress/markdown", "@vuepress/utils"], - "groupName": "vuepress", - "allowedVersions": "!/pre.*$/", + matchFileNames: [ + 'package.json', + ], + enabled: false, }, - // e2e test depends on testing/Dockefile testing-image which has conftest specific version. - // to upgrade conftest versions, we need following PRs. - // 1. update testing/Dockerfile conftest version - // 2. update testing-env tag - // 3. 
update e2e conftest version - // This will allow conftest version updates in testing/Dockefile { - matchPaths: ["testing/**"], - matchPackagePatterns: ["conftest"], - additionalBranchPrefix: "{{baseDir}}-", - groupName: "conftest-testing", - /* - prBodyNotes: [ - ":warning: Upgrade testing-env conftest and then upgrade other conftest versions for e2e :warning:", + matchFileNames: [ + 'testing/**', + ], + additionalBranchPrefix: '{{packageFileDir}}-', + groupName: 'conftest-testing', + matchPackageNames: [ + '/conftest/', ], - */ }, { - ignorePaths: ["testing/**"], - matchPackagePatterns: ["github-actions"], - groupName: "github-", + ignorePaths: [ + 'testing/**', + ], + groupName: 'github-', + matchPackageNames: [ + '/github-actions/', + ], }, - /* - // This tag is currently latest so we can skip this check for now unless we need to pin it again. { - // we need to upgrade testing-env on ci quickly - matchPackageNames: ["ghcr.io/runatlantis/testing-env"], - groupName: "testing-env-ci-test", - schedule: ["every 1 hour after 00:00 and before 23:59 every day"], + matchDatasources: [ + 'docker', + ], + matchPackageNames: [ + 'node', + 'cimg/node', + ], + versioning: 'node', }, - */ { - // use LTS node version for node docker image - matchDatasources: ["docker"], - matchPackageNames: ["node", "cimg/node"], - versioning: "node", + matchPackageNames: [ + 'go', + 'golang', + ], + versioning: 'go', + groupName: 'go', }, ], - // https://docs.renovatebot.com/modules/manager/regex/ - regexManagers: [ + customManagers: [ { - fileMatch: ["(^|/)Dockerfile$", "(^|/)Dockerfile\\.[^/]*$"], + customType: 'regex', + fileMatch: [ + '(^|/)Dockerfile$', + '(^|/)Dockerfile\\.[^/]*$', + ], matchStrings: [ // example: - // renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp + // # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp // ENV DEFAULT_TERRAFORM_VERSION=x.x.x - "renovate: datasource=(?.*?) depName=(?.*?)( versioning=(?.*?))?\\sENV .*?_VERSION=(?.*)\\s", + // # renovate: datasource=github-releases depName=open-policy-agent/conftest + // ARG DEFAULT_CONFTEST_VERSION=x.x.x + 'renovate: datasource=(?.*?) depName=(?.*?)( versioning=(?.*?))?\\s(ARG|ENV) .*?_VERSION=(?.*)\\s', ], - versioningTemplate: "{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}", + versioningTemplate: '{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}', extractVersionTemplate: '^v(?\\d+\\.\\d+\\.\\d+)', }, { - fileMatch: [".*go$"], + customType: 'regex', + fileMatch: [ + '.*go$', + ], matchStrings: [ - // example: - // const ConftestVersion = "x.x.x" // renovate: datasource=github-releases depName=open-policy-agent/conftest - "\\sconst .*Version = \"(?.*)\"\\s// renovate: datasource=(?.*?) depName=(?.*?)( versioning=(?.*?))?\\s", + '\\sconst .*Version = "(?.*)"\\s// renovate: datasource=(?.*?) depName=(?.*?)( versioning=(?.*?))?\\s', ], - versioningTemplate: "{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}", + versioningTemplate: '{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}', extractVersionTemplate: '^v(?\\d+\\.\\d+\\.\\d+)', }, { - fileMatch: [".circleci/config.yml$"], + customType: 'regex', + fileMatch: [ + '^\\.github/workflows/[^/]+\\.ya?ml$', + 'Makefile$', + ], matchStrings: [ - // example: - // # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp - // TRRAFORM_VERSION: x.x.x - "renovate: datasource=(?.*?) depName=(?.*?)( versioning=(?.*?))?\\s.*?_VERSION: (?.*)\\s", + 'renovate: datasource=(?.*?) 
depName=(?.*?)( versioning=(?.*?))?\\s.*?_VERSION: (?.*)\\s', ], - versioningTemplate: "{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}", + versioningTemplate: '{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}', extractVersionTemplate: '^v(?\\d+\\.\\d+\\.\\d+)', }, - ] + ], } diff --git a/.github/workflows/atlantis-image.yml b/.github/workflows/atlantis-image.yml index 5ec8fc9ecd..34c2e3e430 100644 --- a/.github/workflows/atlantis-image.yml +++ b/.github/workflows/atlantis-image.yml @@ -6,12 +6,16 @@ on: - 'main' - 'release-**' tags: - - v*.*.* # stable release like, v0.19.2 - - v*.*.*-pre.* # pre release like, v0.19.0-pre.calendardate + - v*.*.* pull_request: branches: - 'main' - 'release-**' + types: + - opened + - reopened + - synchronize + - ready_for_review workflow_dispatch: concurrency: @@ -23,10 +27,10 @@ jobs: outputs: should-run-build: ${{ steps.changes.outputs.src == 'true' || startsWith(github.ref, 'refs/tags/') }} if: github.event.pull_request.draft == false - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 - - uses: dorny/paths-filter@v3 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 id: changes with: filters: | @@ -44,7 +48,7 @@ jobs: strategy: matrix: image_type: [alpine, debian] - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 env: # Set docker repo to either the fork or the main repo where the branch exists DOCKER_REPO: ghcr.io/${{ github.repository }} @@ -52,26 +56,26 @@ jobs: PUSH: ${{ github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 # Lint the Dockerfile first before setting anything up - name: Lint Dockerfile - uses: hadolint/hadolint-action@v3.1.0 + uses: hadolint/hadolint-action@54c9adbab1582c2ef04b2016b760714a4bfde3cf # v3.1.0 with: dockerfile: "Dockerfile" - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3 with: image: tonistiigi/binfmt:latest platforms: arm64,arm - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3 # https://github.com/docker/build-push-action/issues/761#issuecomment-1575006515 with: driver-opts: | - image=moby/buildkit:v0.10.6 + image=moby/buildkit:v0.14.0 # release version is the name of the tag i.e. v0.10.0 # release version also has the image type appended i.e. 
v0.10.0-alpine @@ -81,7 +85,7 @@ jobs: # if it's v0.10.0 and debian, it will do v0.10.0-debian, latest-debian - name: Docker meta id: meta - uses: docker/metadata-action@v5 + uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5 env: SUFFIX: ${{ format('-{0}', matrix.image_type) }} with: @@ -113,7 +117,7 @@ jobs: # Suffix is not used here since there's no way to disable it above - name: Login to Packages Container registry - uses: docker/login-action@v3 + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3 with: registry: ghcr.io username: ${{ github.actor }} @@ -126,7 +130,7 @@ jobs: - name: "Build ${{ env.PUSH == 'true' && 'and push' || '' }} ${{ env.DOCKER_REPO }} image" if: contains(fromJson('["push", "pull_request"]'), github.event_name) - uses: docker/build-push-action@v5 + uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6 with: cache-from: type=gha cache-to: type=gha,mode=max @@ -143,6 +147,51 @@ jobs: labels: ${{ steps.meta.outputs.labels }} outputs: type=image,name=target,annotation-index.org.opencontainers.image.description=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.description'] }} + test: + needs: [changes] + if: needs.changes.outputs.should-run-build == 'true' + name: Test Image With Goss + runs-on: ubuntu-24.04 + strategy: + matrix: + image_type: [alpine, debian] + env: + # Set docker repo to either the fork or the main repo where the branch exists + DOCKER_REPO: ghcr.io/${{ github.repository }} + + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3 + # https://github.com/docker/build-push-action/issues/761#issuecomment-1575006515 + with: + driver-opts: | + image=moby/buildkit:v0.14.0 + + - name: "Build and load into Docker" + if: contains(fromJson('["push", "pull_request"]'), github.event_name) + uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6 + with: + cache-from: type=gha + cache-to: type=gha,mode=max + context: . 
+ build-args: | + ATLANTIS_BASE_TAG_TYPE=${{ matrix.image_type }} + push: false + load: true + tags: "${{ env.DOCKER_REPO }}:goss-test" + target: ${{ matrix.image_type }} + + - name: "Setup Goss" + uses: e1himself/goss-installation-action@fbb6fb55d3e59c96045b2500eeb8ce0995d99ac1 # v1.2.1 + with: + version: "v0.4.7" + + - name: Execute Goss tests + run: | + dgoss run --rm ${{ env.DOCKER_REPO }}:goss-test bash -c 'while true; do sleep 1; done;' + skip-build: needs: [changes] if: needs.changes.outputs.should-run-build == 'false' @@ -150,6 +199,6 @@ jobs: strategy: matrix: image_type: [alpine, debian] - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - run: 'echo "No build required"' diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 8be3df50f9..2cfc8eaa24 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -30,15 +30,21 @@ on: schedule: - cron: '17 9 * * 5' +permissions: + contents: read + jobs: changes: + permissions: + contents: read # for dorny/paths-filter to fetch a list of changed files + pull-requests: read # for dorny/paths-filter to read pull requests outputs: should-run-analyze: ${{ steps.changes.outputs.src == 'true' }} if: github.event.pull_request.draft == false - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 - - uses: dorny/paths-filter@v3 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 id: changes with: filters: | @@ -50,7 +56,7 @@ jobs: needs: [changes] name: Analyze if: github.event.pull_request.draft == false && needs.changes.outputs.should-run-analyze == 'true' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 permissions: actions: read contents: read @@ -67,11 +73,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v3 + uses: github/codeql-action/init@662472033e021d55d94146f66f6058822b0b39fd # v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -85,7 +91,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v3 + uses: github/codeql-action/autobuild@662472033e021d55d94146f66f6058822b0b39fd # v3 # ℹī¸ Command-line programs to run using the OS shell. 
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun @@ -98,7 +104,7 @@ jobs: # ./location_of_script_within_repo/buildscript.sh - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 + uses: github/codeql-action/analyze@662472033e021d55d94146f66f6058822b0b39fd # v3 with: category: "/language:${{matrix.language}}" @@ -109,6 +115,6 @@ jobs: strategy: matrix: language: [ 'go', 'javascript' ] - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - run: 'echo "No build required"' + - run: 'echo "No build required"' diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml index aed089def0..de8bd74352 100644 --- a/.github/workflows/labeler.yml +++ b/.github/workflows/labeler.yml @@ -8,12 +8,15 @@ on: - synchronize - ready_for_review +permissions: + contents: read + jobs: triage: permissions: contents: read pull-requests: write if: github.event.pull_request.draft == false - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/labeler@v5 + - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 02ab6f7365..115068ed48 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -9,21 +9,29 @@ on: - ready_for_review branches: - "main" - - 'release-**' + - "release-**" concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true +permissions: + # Required: allow read access to the content for analysis. + contents: read + # Optional: allow read access to pull request. Use with `only-new-issues` option. + pull-requests: read + # Optional: Allow write access to checks to allow the action to annotate code in the PR. 
+ checks: write + jobs: changes: outputs: should-run-linting: ${{ steps.changes.outputs.go == 'true' }} if: github.event.pull_request.draft == false - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 - - uses: dorny/paths-filter@v3 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 id: changes with: filters: | @@ -37,24 +45,25 @@ jobs: needs: [changes] if: github.event.pull_request.draft == false && needs.changes.outputs.should-run-linting == 'true' name: Linting - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - # need to setup go toolchain explicitly - - uses: actions/setup-go@v5 - with: - go-version-file: go.mod + # need to setup go toolchain explicitly + - uses: actions/setup-go@41dfa10bad2bb2ae585af6ee5bb4d7d973ad74ed # v5 + with: + go-version-file: go.mod - - name: golangci-lint - uses: reviewdog/action-golangci-lint@v2 - with: - tool_name: golangci-lint + - name: golangci-lint + uses: golangci/golangci-lint-action@971e284b6050e8a5849b72094c50ab08da042db8 # v6 + with: + # renovate: datasource=github-releases depName=golangci/golangci-lint + version: v1.60.1 skip-lint: needs: [changes] if: needs.changes.outputs.should-run-linting == 'false' name: Linting - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - run: 'echo "No build required"' diff --git a/.github/workflows/pr-lint.yml b/.github/workflows/pr-lint.yml index e3a5b647ac..6ec8adfc59 100644 --- a/.github/workflows/pr-lint.yml +++ b/.github/workflows/pr-lint.yml @@ -13,8 +13,8 @@ permissions: jobs: main: name: Validate PR title - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: amannn/action-semantic-pull-request@v5 + - uses: amannn/action-semantic-pull-request@0723387faaf9b38adef4775cd42cfd5155ed6017 # v5 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/pr-size-labeler.yml b/.github/workflows/pr-size-labeler.yml new file mode 100644 index 0000000000..4e48b776d9 --- /dev/null +++ b/.github/workflows/pr-size-labeler.yml @@ -0,0 +1,33 @@ +name: pr-size + +on: [pull_request] + +permissions: + contents: read + +jobs: + labeler: + permissions: + pull-requests: write # for codelytv/pr-size-labeler to add labels & comment on PRs + runs-on: ubuntu-latest + name: Label the PR size + steps: + - uses: codelytv/pr-size-labeler@c7a55a022747628b50f3eb5bf863b9e796b8f274 # v1 + with: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + xs_label: 'size/xs' + xs_max_size: '10' + s_label: 'size/s' + s_max_size: '200' + m_label: 'size/m' + m_max_size: '1000' + l_label: 'size/l' + l_max_size: '10000' + xl_label: 'size/xl' + fail_if_xl: 'false' + message_if_xl: > + This PR exceeds the recommended size of 1000 lines. + Please make sure you are NOT addressing multiple issues with one PR. + Note this PR might be rejected due to its size. 
+ github_api_url: 'https://api.github.com' + files_to_ignore: '' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 479e404f69..4acf1d4b13 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -3,30 +3,30 @@ name: release on: push: tags: - - v*.*.* # stable release like, v0.19.2 - - v*.*.*-pre.* # pre release like, v0.19.0-pre.calendardate + - v*.*.* workflow_dispatch: jobs: goreleaser: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: submodules: true - - uses: actions/setup-go@v5 + - uses: actions/setup-go@41dfa10bad2bb2ae585af6ee5bb4d7d973ad74ed # v5 with: go-version-file: go.mod - name: Run GoReleaser for stable release - uses: goreleaser/goreleaser-action@v5 + uses: goreleaser/goreleaser-action@286f3b13b1b49da4ac219696163fb8c1c93e1200 # v6 if: (!contains(github.ref, 'pre')) with: - version: v1.16.2 # You can pass flags to goreleaser via GORELEASER_ARGS # --clean will save you deleting the dist dir args: release --clean + distribution: goreleaser # or 'goreleaser-pro' + version: "~> v2" # or 'latest', 'nightly', semver env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -41,32 +41,3 @@ jobs: -q .body > tmp-CHANGELOG.md env: GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} - - - name: Run GoReleaser for pre-release - uses: goreleaser/goreleaser-action@v5 - if: contains(github.ref, 'pre') - with: - version: v1.16.2 - # You can pass flags to goreleaser via GORELEASER_ARGS - # --clean will save you deleting the dist dir - args: release --clean --release-notes=tmp-CHANGELOG.md - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GORELEASER_CURRENT_TAG: ${{ steps.changelog.outputs.RELEASE_TAG }} - - homebrew: - name: "Bump Homebrew formula" - runs-on: ubuntu-22.04 - if: false - # if: (!contains(github.ref, 'pre')) - steps: - - uses: mislav/bump-homebrew-formula-action@v2 - with: - # A PR will be sent to github.com/Homebrew/homebrew-core to update this formula: - formula-name: atlantis - commit-message: | - {{formulaName}} {{version}} - - Created by https://github.com/mislav/bump-homebrew-formula-action - env: - COMMITTER_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/renovate-config.yml b/.github/workflows/renovate-config.yml index bb5258df99..06283df876 100644 --- a/.github/workflows/renovate-config.yml +++ b/.github/workflows/renovate-config.yml @@ -12,10 +12,13 @@ on: - '.github/renovate.json5' workflow_dispatch: +permissions: + contents: read + jobs: validate: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 - - uses: actions/setup-node@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4 - run: npx --package renovate -c 'renovate-config-validator' diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml new file mode 100644 index 0000000000..9b66c58652 --- /dev/null +++ b/.github/workflows/scorecard.yml @@ -0,0 +1,56 @@ +name: Scorecard supply-chain security +on: + schedule: + - cron: '0 5 * * 1' + push: + branches: + - main + +permissions: read-all + +jobs: + analysis: + name: Scorecard analysis + runs-on: ubuntu-latest + permissions: + # Needed to upload the results to code-scanning dashboard. + security-events: write + # Needed to publish results and get a badge (see publish_results below). 
+ id-token: write + + steps: + - name: 'Checkout code' + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + show-progress: false + + - name: 'Run analysis' + uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0 + with: + results_file: results.sarif + results_format: sarif + + # Public repositories: + # - Publish results to OpenSSF REST API for easy access by consumers + # - Allows the repository to include the Scorecard badge. + # - See https://github.com/ossf/scorecard-action#publishing-results. + # For private repositories: + # - `publish_results` will always be set to `false`, regardless + # of the value entered here. + publish_results: true + + # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF + # format to the repository Actions tab. + - name: 'Upload artifact' + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 + with: + name: SARIF file + path: results.sarif + retention-days: 5 + + # Upload the results to GitHub's code scanning dashboard. + - name: 'Upload to code-scanning' + uses: github/codeql-action/upload-sarif@662472033e021d55d94146f66f6058822b0b39fd # v3.27.0 + with: + sarif_file: results.sarif diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index c989d76963..0236da84c9 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -2,11 +2,17 @@ name: Close Stale PRs on: schedule: - cron: '30 1 * * *' +permissions: + contents: read + jobs: stale: - runs-on: ubuntu-22.04 + permissions: + issues: write # for actions/stale to close stale issues + pull-requests: write # for actions/stale to close stale PRs + runs-on: ubuntu-24.04 steps: - - uses: actions/stale@v9 + - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9 with: stale-pr-message: 'This issue is stale because it has been open for 1 month with no activity. Remove stale label or comment or this will be closed in 1 month.' stale-issue-message: This issue is stale because it has been open for 1 month with no activity. Remove stale label or comment or this will be closed in 1 month.' 
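The new `test` job in `.github/workflows/atlantis-image.yml` above validates the freshly built image with Goss: `dgoss run` starts the container, copies the goss binary and its spec inside, evaluates each assertion, and fails the job on any mismatch. The spec itself is not part of this diff; a minimal sketch of the kind of checks such a file could declare (paths and names here are assumptions, not the repository's actual spec) might be:

```yaml
# Illustrative goss.yaml sketch — not the repository's actual spec.
file:
  /usr/local/bin/atlantis:
    exists: true
    filetype: file
user:
  # the non-root user created in the Dockerfile
  atlantis:
    exists: true
command:
  # runs inside the container that dgoss started from the goss-test tag
  atlantis version:
    exit-status: 0
    timeout: 10000
```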
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index fab30b3b31..0d5d739db1 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -3,8 +3,8 @@ name: tester on: push: branches: - - 'main' - - 'release-**' + - "main" + - "release-**" pull_request: types: - opened @@ -12,51 +12,60 @@ on: - synchronize - ready_for_review branches: - - 'main' - - 'release-**' + - "main" + - "release-**" concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true +permissions: + contents: read + jobs: changes: + permissions: + contents: read # for dorny/paths-filter to fetch a list of changed files + pull-requests: read # for dorny/paths-filter to read pull requests outputs: should-run-tests: ${{ steps.changes.outputs.go == 'true' }} if: github.event.pull_request.draft == false - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 - - uses: dorny/paths-filter@v3 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 id: changes with: filters: | go: - '**.go' + - '**.txt' # golden file test output - 'go.*' + - '**.tmpl' - '.github/workflows/test.yml' test: needs: [changes] if: needs.changes.outputs.should-run-tests == 'true' name: Tests - runs-on: ubuntu-22.04 - container: ghcr.io/runatlantis/testing-env:latest + runs-on: ubuntu-24.04 + container: ghcr.io/runatlantis/testing-env:latest@sha256:5c56ee1df3dd9ea426bee50df43e2407df054e81f4b4eb183173e90a11f86922 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 # need to setup go toolchain explicitly - - uses: actions/setup-go@v5 + - uses: actions/setup-go@41dfa10bad2bb2ae585af6ee5bb4d7d973ad74ed # v5 with: go-version-file: go.mod - run: make test-all - run: make check-fmt + ########################################################### # Notifying #contributors about test failure on main branch ########################################################### - name: Slack failure notification if: ${{ github.ref == 'refs/heads/main' && failure() }} - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 + uses: slackapi/slack-github-action@37ebaef184d7626c5f204ab8d3baff4262dd30f0 # v1.27.0 with: payload: | { @@ -95,6 +104,81 @@ jobs: needs: [changes] if: needs.changes.outputs.should-run-tests == 'false' name: Tests - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - run: 'echo "No build required"' + + e2e-github: + runs-on: ubuntu-latest + # dont run e2e tests on forked PRs + if: github.event.pull_request.head.repo.fork == false + env: + TERRAFORM_VERSION: 1.9.2 + ATLANTIS_GH_USER: ${{ secrets.ATLANTISBOT_GITHUB_USERNAME }} + ATLANTIS_GH_TOKEN: ${{ secrets.ATLANTISBOT_GITHUB_TOKEN }} + NGROK_AUTH_TOKEN: ${{ secrets.ATLANTISBOT_NGROK_AUTH_TOKEN }} + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: actions/setup-go@41dfa10bad2bb2ae585af6ee5bb4d7d973ad74ed # v5 + with: + go-version-file: go.mod + + # This version of TF will be downloaded before Atlantis is started. + # We do this instead of setting --default-tf-version because setting + # that flag starts the download asynchronously so we'd have a race + # condition. 
+ - uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # v3 + with: + terraform_version: ${{ env.TERRAFORM_VERSION }} + + - name: Setup ngrok + run: | + wget -q -O ngrok.tar.gz https://bin.equinox.io/a/4no1PS1PoRF/ngrok-v3-3.13.0-linux-amd64.tar.gz + tar -xzf ngrok.tar.gz + chmod +x ngrok + ./ngrok version + - name: Setup gitconfig + run: | + git config --global user.email "maintainers@runatlantis.io" + git config --global user.name "atlantisbot" + + - run: | + make build-service + ./scripts/e2e.sh + e2e-gitlab: + runs-on: ubuntu-latest + # dont run e2e tests on forked PRs + if: github.event.pull_request.head.repo.fork == false + env: + TERRAFORM_VERSION: 1.9.2 + ATLANTIS_GITLAB_USER: ${{ secrets.ATLANTISBOT_GITLAB_USERNAME }} + ATLANTIS_GITLAB_TOKEN: ${{ secrets.ATLANTISBOT_GITLAB_TOKEN }} + NGROK_AUTH_TOKEN: ${{ secrets.ATLANTISBOT_NGROK_AUTH_TOKEN }} + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: actions/setup-go@41dfa10bad2bb2ae585af6ee5bb4d7d973ad74ed # v5 + with: + go-version-file: go.mod + + # This version of TF will be downloaded before Atlantis is started. + # We do this instead of setting --default-tf-version because setting + # that flag starts the download asynchronously so we'd have a race + # condition. + - uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # v3 + with: + terraform_version: ${{ env.TERRAFORM_VERSION }} + + - name: Setup ngrok + run: | + wget -q -O ngrok.tar.gz https://bin.equinox.io/a/4no1PS1PoRF/ngrok-v3-3.13.0-linux-amd64.tar.gz + tar -xzf ngrok.tar.gz + chmod +x ngrok + ./ngrok version + - name: Setup gitconfig + run: | + git config --global user.email "maintainers@runatlantis.io" + git config --global user.name "atlantisbot" + + - run: | + make build-service + ./scripts/e2e.sh diff --git a/.github/workflows/testing-env-image.yml b/.github/workflows/testing-env-image.yml index 0cf8d5ecf2..cf61663805 100644 --- a/.github/workflows/testing-env-image.yml +++ b/.github/workflows/testing-env-image.yml @@ -17,13 +17,16 @@ concurrency: jobs: changes: + permissions: + contents: read # for dorny/paths-filter to fetch a list of changed files + pull-requests: read # for dorny/paths-filter to read pull requests outputs: should-run-build: ${{ steps.changes.outputs.src == 'true' }} if: github.event.pull_request.draft == false - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 - - uses: dorny/paths-filter@v3 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 id: changes with: filters: | @@ -35,21 +38,21 @@ jobs: needs: [changes] if: needs.changes.outputs.should-run-build == 'true' name: Build Testing Env Image - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3 with: image: tonistiigi/binfmt:latest platforms: arm64,arm - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3 - name: Login to Packages Container registry - uses: docker/login-action@v3 + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3 with: registry: ghcr.io username: ${{ github.actor }} @@ -57,7 +60,7 @@ jobs: - run: echo "TODAY=$(date 
+"%Y.%m.%d")" >> $GITHUB_ENV - name: Build and push testing-env:${{env.TODAY}} image - uses: docker/build-push-action@v5 + uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6 with: cache-from: type=gha cache-to: type=gha,mode=max @@ -72,6 +75,6 @@ jobs: needs: [changes] if: needs.changes.outputs.should-run-build == 'false' name: Build Testing Env Image - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - run: 'echo "No build required"' \ No newline at end of file + - run: 'echo "No build required"' diff --git a/.github/workflows/website.yml b/.github/workflows/website.yml index 8d58751deb..822384b82c 100644 --- a/.github/workflows/website.yml +++ b/.github/workflows/website.yml @@ -24,16 +24,16 @@ jobs: outputs: should-run-link-check: ${{ steps.changes.outputs.src == 'true' }} if: github.event.pull_request.draft == false - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 - - uses: dorny/paths-filter@v3 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 id: changes with: filters: | src: - - '**.js' - - 'pnpm-lock.yaml' + - 'runatlantis.io/**' + - 'package-lock.json' - 'package.json' - '.github/workflows/website.yml' @@ -46,36 +46,55 @@ jobs: name: Website Link Check runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - - uses: wyvox/action-setup-pnpm@v3 + - name: markdown-lint + uses: DavidAnson/markdownlint-cli2-action@b4c9feab76d8025d1e83c653fa3990936df0e6c8 # v16 with: - node-version: 20 + config: .markdownlint.yaml + globs: 'runatlantis.io/**/*.md' + + - name: setup npm + uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4 + with: + node-version: '20' + cache: 'npm' - name: run http-server env: # renovate: datasource=github-releases depName=raviqqe/muffet - MUFFET_VERSION: 2.9.3 + MUFFET_VERSION: 2.10.3 run: | # install raviqqe/muffet to check for broken links. 
curl -Ls https://github.com/raviqqe/muffet/releases/download/v${MUFFET_VERSION}/muffet_linux_amd64.tar.gz | tar -xz # build site - pnpm install - pnpm website:build + npm install + npm run website:build # start http-server for integration testing - npx http-server runatlantis.io/.vuepress/dist & + npx http-server runatlantis.io/.vitepress/dist & + + - name: Run Playwright E2E tests + run: | + npx playwright install --with-deps + npm run e2e - name: wait until server listened run: curl --retry-delay 1 --retry 30 --retry-all-error http://localhost:8080 # medium.com => was being rate limited: HTTP 429 + # twitter.com => too many redirections + # www.flaticon.com => 403 error - run: | ./muffet \ -e 'https://medium.com/runatlantis' \ + -e 'https://dev.to/*' \ + -e 'https://twitter.com/*' \ + -e 'https://www.flaticon.com/*' \ -e 'https://github\.com/runatlantis/atlantis/edit/main/.*' \ -e 'https://github.com/runatlantis/helm-charts#customization' \ + -e 'https://github.com/sethvargo/atlantis-on-gke/blob/master/terraform/tls.tf#L64-L84' \ -e 'https://confluence.atlassian.com/*' \ --header 'Accept-Encoding:deflate, gzip' \ --buffer-size 8192 \ diff --git a/.gitignore b/.gitignore index a3040a1ee5..92cc521107 100644 --- a/.gitignore +++ b/.gitignore @@ -1,14 +1,12 @@ .idea ./atlantis *.iml -.vscode atlantis.db output .DS_Store .cover .terraform/ node_modules/ -**/.vuepress/* helm/test-values.yaml *.swp golangci-lint @@ -16,7 +14,6 @@ atlantis .devcontainer atlantis.env *.act -package-lock.json Dockerfile.local # gitreleaser @@ -27,3 +24,14 @@ tmp-CHANGELOG.md # IDE files *.code-workspace + +# draw.io backup files +*.bkp + +# VitePress build output & cache directory +**/.vitepress/cache +**/.vitepress/dist +**/.vitepress/config.ts.timestamp-* + +# playwright +test-results/ diff --git a/.golangci.yml b/.golangci.yml index b4c6d83c26..0afa70118c 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -19,14 +19,14 @@ linters: - gofmt - gosec - gosimple + - govet - ineffassign - misspell - revive - staticcheck + - testifylint - typecheck - unconvert - unused - - vet - - vetshadow run: timeout: 10m diff --git a/.goreleaser.yml b/.goreleaser.yml index b937a3da07..52d450ba21 100644 --- a/.goreleaser.yml +++ b/.goreleaser.yml @@ -1,15 +1,27 @@ +version: 2 + env: - CGO_ENABLED=0 + builds: - - targets: - - darwin_amd64 - - darwin_arm64 - - linux_386 - - linux_amd64 - - linux_arm - - linux_arm64 - - windows_386 - - windows_amd64 + - id: atlantis + + targets: + - darwin_amd64 + - darwin_arm64 + - linux_386 + - linux_amd64 + - linux_arm + - linux_arm64 + - windows_386 + - windows_amd64 + + flags: + - -trimpath + + ldflags: + - -s -w + - -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}} archives: - id: zip @@ -19,10 +31,10 @@ archives: - none* checksum: - name_template: 'checksums.txt' + name_template: "checksums.txt" changelog: - skip: true + disable: true release: # If set to true, will not auto-publish the release. diff --git a/.markdownlint.yaml b/.markdownlint.yaml new file mode 100644 index 0000000000..9f4f9cacdc --- /dev/null +++ b/.markdownlint.yaml @@ -0,0 +1,38 @@ +# MD013/line-length +# +# We're not particular about line length, generally preferring longer +# lines, since tools like Grammarly and other writing assistance tools +# work best with "normal" lines not broken up arbitrarily.
+# +# https://github.com/DavidAnson/markdownlint/blob/main/doc/md013.md +MD013: false + +# MD033/no-inline-html +# +# We're fine with inline HTML, there are lots of valid VitePress features +# that depend on this. +# +# https://github.com/DavidAnson/markdownlint/blob/main/doc/md033.md +MD033: false + +# MD024/no-duplicate-heading +# +# VitePress does not follow GitHub heading styling, so duplicate headlines +# are fine as long as they are not siblings (aka same indentation hierarchy) +# +# https://github.com/DavidAnson/markdownlint/blob/main/doc/md024.md +MD024: + siblings_only: true + +# MD051/link-fragments +# +# VitePress generates these differently than markdownlint expects, so disabling +# for now, and something to improve on later (cc @jippi) +# +# https://github.com/DavidAnson/markdownlint/blob/main/doc/md051.md +MD051: false + +# for blog posts +MD025: false +MD045: false +MD001: false diff --git a/.node-version b/.node-version index 2dbbe00e67..2a393af592 100644 --- a/.node-version +++ b/.node-version @@ -1 +1 @@ -20.11.1 +20.18.0 diff --git a/.tool-versions b/.tool-versions index bda267d555..61fe7092be 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1 +1,2 @@ -pnpm 8.15.3 +node 20.14.0 +go 1.23.0 diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000000..b93fa58b76 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "git.alwaysSignOff": true +} diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 819b7f5a8c..def19ba7aa 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -55,8 +55,9 @@ further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported by contacting the project team by messaging `@lkysow` on the [Atlantis Slack channel](https://join.slack.com/t/atlantis-community/shared_invite/zt-1nt7yx7uq-AnVRc_JItF1CDwZtfqv_OA). All -complaints will be reviewed and investigated and will result in a response that +reported by contacting the project team by messaging `@lkysow` on the +[Atlantis Slack community](https://communityinviter.com/apps/cloud-native/cncf). +All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
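The `customManagers` added to `.github/renovate.json5` earlier in this diff key off a comment convention rather than a manifest: a `# renovate:` annotation directly above a version pin, as with `MUFFET_VERSION` in `website.yml` above (the same manager's `fileMatch` also covers the Makefile). Any `*_VERSION` variable in a workflow can opt in the same way; for example, a hypothetical pin shown only to illustrate the convention:

```yaml
env:
  # renovate: datasource=github-releases depName=open-policy-agent/conftest
  CONFTEST_VERSION: 0.56.0
```

The regex captures the datasource, dependency name, optional versioning scheme, and current value, and `extractVersionTemplate` strips the leading `v` from upstream release tags before comparing.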
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3bd4290095..c64cde6e5e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,9 +1,24 @@ -# Topics -* [Reporting Issues](#reporting-issues) -* [Reporting Security Issues](#reporting-security-issues) -* [Updating The Website](#updating-the-website) -* [Developing](#developing) -* [Releasing](#creating-a-new-release) +# Contributing + +# Table of Contents +- [Reporting Issues](#reporting-issues) +- [Reporting Security Issues](#reporting-security-issues) +- [Updating The Website](#updating-the-website) +- [Developing](#developing) + - [Running Atlantis Locally](#running-atlantis-locally) + - [Running Atlantis With Local Changes](#running-atlantis-with-local-changes) + - [Rebuilding](#rebuilding) + - [Running Tests Locally](#running-tests-locally) + - [Running Tests In Docker](#running-tests-in-docker) + - [Calling Your Local Atlantis From GitHub](#calling-your-local-atlantis-from-github) + - [Code Style](#code-style) + - [Logging](#logging) + - [Errors](#errors) + - [Testing](#testing) + - [Mocks](#mocks) +- [Backporting Fixes](#backporting-fixes) + - [Manual Backporting Fixes](#manual-backporting-fixes) +- [Creating a New Release](#creating-a-new-release) # Reporting Issues * When reporting issues, please include the output of `atlantis version`. @@ -14,7 +29,7 @@ We take security issues seriously. Please report a security vulnerability to the maintainers using [private vulnerability reporting](https://github.com/runatlantis/atlantis/security/advisories/new). # Updating The Website -* To view the generated website locally, run `pnpm website:dev` and then +* To view the generated website locally, run `npm run website:dev` and then open your browser to http://localhost:8080. * The website will be regenerated when your pull request is merged to main. @@ -23,11 +38,11 @@ open your browser to http://localhost:8080. ## Running Atlantis Locally * Clone the repo from https://github.com/runatlantis/atlantis/ * Compile Atlantis: - ``` + ```sh go install ``` * Run Atlantis: - ``` + ```sh atlantis server --gh-user --gh-token --repo-allowlist --gh-webhook-secret --log-level debug ``` If you get an error like `command not found: atlantis`, ensure that `$GOPATH/bin` is in your `$PATH`. @@ -36,43 +51,46 @@ open your browser to http://localhost:8080. ## Running Atlantis With Local Changes Docker compose is set up to start an atlantis container and ngrok container in the same network in order to expose the atlantis instance to the internet. In order to do this, create a file in the repository called `atlantis.env` and add the required env vars for the atlantis server configuration. e.g. -``` + +```sh +NGROK_AUTH=1234567890 + ATLANTIS_GH_APP_ID=123 ATLANTIS_GH_APP_KEY_FILE="/.ssh/somekey.pem" ATLANTIS_GH_WEBHOOK_SECRET=12345 ``` -Note: `~/.ssh` is mounted to allow for referencing any local ssh keys +Note: `~/.ssh` is mounted to allow for referencing any local ssh keys. Following this just run: -``` +```sh make build-service -docker-compose up +docker-compose up --detach +docker-compose logs --follow ``` ### Rebuilding - If the ngrok container is restarted, the url will change which is a hassle. Fortunately, when we make a code change, we can rebuild and restart the atlantis container easily without disrupting ngrok. e.g. -``` +```sh make build-service docker-compose up --detach --build ``` -## Running Tests Locally: - `make test`. If you want to run the integration tests that actually run real `terraform` commands, run `make test-all`.
-## Running Tests In Docker: -``` +## Running Tests In Docker +```sh docker run --rm -v $(pwd):/go/src/github.com/runatlantis/atlantis -w /go/src/github.com/runatlantis/atlantis ghcr.io/runatlantis/testing-env:latest make test ``` Or to run the integration tests -``` + +```sh docker run --rm -v $(pwd):/go/src/github.com/runatlantis/atlantis -w /go/src/github.com/runatlantis/atlantis ghcr.io/runatlantis/testing-env:latest make test-all ``` @@ -80,18 +98,19 @@ docker run --rm -v $(pwd):/go/src/github.com/runatlantis/atlantis -w /go/src/git - Create a test terraform repository in your GitHub. - Create a personal access token for Atlantis. See [Create a GitHub token](https://github.com/runatlantis/atlantis/tree/main/runatlantis.io/docs/access-credentials.md#generating-an-access-token). - Start Atlantis in server mode using that token: -``` +```sh atlantis server --gh-user --gh-token --repo-allowlist --gh-webhook-secret --log-level debug ``` - Download ngrok from https://ngrok.com/download. This will enable you to expose Atlantis running on your laptop to the internet so GitHub can call it. - When you've downloaded and extracted ngrok, run it on port `4141`: -``` +```sh ngrok http 4141 ``` - Create a Webhook in your repo and use the `https` url that `ngrok` printed out after running `ngrok http 4141`. Be sure to append `/events` so your webhook url looks something like `https://efce3bcd.ngrok.io/events`. See [Add GitHub Webhook](https://github.com/runatlantis/atlantis/blob/main/runatlantis.io/docs/configuring-webhooks.md#configuring-webhooks). - Create a pull request and type `atlantis help`. You should see the request in the `ngrok` and Atlantis logs and you should also see Atlantis comment back. ## Code Style + ### Logging - `ctx.Log` should be available in most methods. If not, pass it down. - levels: @@ -161,12 +180,11 @@ go get github.com/petergtz/pegomock/... ``` # Backporting Fixes - Atlantis now uses a [cherry-pick-bot](https://github.com/googleapis/repo-automation-bots/tree/main/packages/cherry-pick-bot) from Google. The bot assists in maintaining changes across releases branches by easily cherry-picking changes via pull requests. Maintainers and Core Contributors can add a comment to a pull request: -``` +```sh /cherry-pick target-branch-name ``` @@ -175,7 +193,6 @@ target-branch-name is the branch to cherry-pick to. cherry-pick-bot will cherry- The bot will immediately try to cherry-pick a merged PR. On unmerged pull request, it will not do anything immediately, but wait until merge. You can comment multiple times on a PR for multiple release branches. ## Manual Backporting Fixes - The bot will fail to cherry-pick if the feature branches' git history is not linear (merge commits instead of rebase). In that case, you will need to manually cherry-pick the squashed merged commit from main to the release branch 1. Switch to the release branch intended for the fix. 
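The contributing guide above describes the local docker-compose setup: an atlantis container and an ngrok container on one network, configured through `atlantis.env`, with `~/.ssh` mounted into the container. The repository's actual `docker-compose.yml` is not part of this diff; a rough sketch of that shape, in which the service names, ngrok image, and wiring are assumptions, could look like:

```yaml
# Illustrative sketch only — not the repository's actual docker-compose.yml.
services:
  atlantis:
    build: .
    env_file: atlantis.env        # ATLANTIS_GH_* settings from the guide above
    volumes:
      - ~/.ssh:/.ssh:ro           # so ATLANTIS_GH_APP_KEY_FILE can point at /.ssh/...
    ports:
      - "4141:4141"               # the port the guide exposes via ngrok
  ngrok:
    image: ngrok/ngrok            # assumed image
    environment:
      NGROK_AUTHTOKEN: ${NGROK_AUTH}
    command: http atlantis:4141   # forward a public URL to the atlantis service
```

Because both services share the compose network, ngrok can reach Atlantis by service name, which is why restarting only the atlantis container (per the Rebuilding section) keeps the public URL stable.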
diff --git a/Dockerfile b/Dockerfile index 992e494900..2fd762f018 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,16 +1,19 @@ -# syntax=docker/dockerfile:1 +# syntax=docker/dockerfile:1@sha256:865e5dd094beca432e8c0a1d5e1c465db5f998dca4e439981029b3b81fb39ed5 # what distro is the image being built for -ARG ALPINE_TAG=3.19.1 -ARG DEBIAN_TAG=12.5-slim +ARG ALPINE_TAG=3.20.3@sha256:beefdbd8a1da6d2915566fde36db9db0b524eb737fc57cd1367effd16dc0d06d +ARG DEBIAN_TAG=12.7-slim@sha256:36e591f228bb9b99348f584e83f16e012c33ba5cad44ef5981a1d7c0a93eca22 +ARG GOLANG_TAG=1.23.2-alpine@sha256:9dd2625a1ff2859b8d8b01d8f7822c0f528942fe56cfe7a1e7c38d3b8d72d679 # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp -ARG DEFAULT_TERRAFORM_VERSION=1.7.2 +ARG DEFAULT_TERRAFORM_VERSION=1.9.8 +# renovate: datasource=github-releases depName=opentofu/opentofu versioning=hashicorp +ARG DEFAULT_OPENTOFU_VERSION=1.8.4 # renovate: datasource=github-releases depName=open-policy-agent/conftest -ARG DEFAULT_CONFTEST_VERSION=0.46.0 +ARG DEFAULT_CONFTEST_VERSION=0.56.0 # Stage 1: build artifact and download deps -FROM golang:1.22.0-alpine AS builder +FROM golang:${GOLANG_TAG} AS builder ARG ATLANTIS_VERSION=dev ENV ATLANTIS_VERSION=${ATLANTIS_VERSION} @@ -41,7 +44,7 @@ RUN --mount=type=cache,target=/go/pkg/mod \ --mount=type=cache,target=/root/.cache/go-build \ CGO_ENABLED=0 go build -trimpath -ldflags "-s -w -X 'main.version=${ATLANTIS_VERSION}' -X 'main.commit=${ATLANTIS_COMMIT}' -X 'main.date=${ATLANTIS_DATE}'" -v -o atlantis . -FROM debian:${DEBIAN_TAG} as debian-base +FROM debian:${DEBIAN_TAG} AS debian-base # Install packages needed to run Atlantis. # We place this last as it will bust less docker layer caches when packages update @@ -61,14 +64,13 @@ RUN apt-get update && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* -FROM debian-base as deps +FROM debian-base AS deps # Get the architecture the image is being built for ARG TARGETPLATFORM WORKDIR /tmp/build # install conftest -# renovate: datasource=github-releases depName=open-policy-agent/conftest ARG DEFAULT_CONFTEST_VERSION ENV DEFAULT_CONFTEST_VERSION=${DEFAULT_CONFTEST_VERSION} SHELL ["/bin/bash", "-o", "pipefail", "-c"] @@ -92,7 +94,7 @@ RUN AVAILABLE_CONFTEST_VERSIONS=${DEFAULT_CONFTEST_VERSION} && \ # install git-lfs # renovate: datasource=github-releases depName=git-lfs/git-lfs -ENV GIT_LFS_VERSION=3.4.1 +ENV GIT_LFS_VERSION=3.5.1 RUN case ${TARGETPLATFORM} in \ "linux/amd64") GIT_LFS_ARCH=amd64 ;; \ @@ -106,31 +108,26 @@ RUN case ${TARGETPLATFORM} in \ git-lfs --version # install terraform binaries -# renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp ARG DEFAULT_TERRAFORM_VERSION ENV DEFAULT_TERRAFORM_VERSION=${DEFAULT_TERRAFORM_VERSION} +ARG DEFAULT_OPENTOFU_VERSION +ENV DEFAULT_OPENTOFU_VERSION=${DEFAULT_OPENTOFU_VERSION} + +# COPY scripts/download-release.sh . +COPY --from=builder /app/scripts/download-release.sh download-release.sh # In the official Atlantis image, we only have the latest of each Terraform version. 
# Each binary is about 80 MB so we limit it to the 4 latest minor releases or fewer -RUN AVAILABLE_TERRAFORM_VERSIONS="1.4.7 1.5.7 1.6.6 ${DEFAULT_TERRAFORM_VERSION}" && \ - case "${TARGETPLATFORM}" in \ - "linux/amd64") TERRAFORM_ARCH=amd64 ;; \ - "linux/arm64") TERRAFORM_ARCH=arm64 ;; \ - "linux/arm/v7") TERRAFORM_ARCH=arm ;; \ - *) echo "ERROR: 'TARGETPLATFORM' value expected: ${TARGETPLATFORM}"; exit 1 ;; \ - esac && \ - for VERSION in ${AVAILABLE_TERRAFORM_VERSIONS}; do \ - curl -LOs "https://releases.hashicorp.com/terraform/${VERSION}/terraform_${VERSION}_linux_${TERRAFORM_ARCH}.zip" && \ - curl -LOs "https://releases.hashicorp.com/terraform/${VERSION}/terraform_${VERSION}_SHA256SUMS" && \ - sed -n "/terraform_${VERSION}_linux_${TERRAFORM_ARCH}.zip/p" "terraform_${VERSION}_SHA256SUMS" | sha256sum -c && \ - mkdir -p "/usr/local/bin/tf/versions/${VERSION}" && \ - unzip "terraform_${VERSION}_linux_${TERRAFORM_ARCH}.zip" -d "/usr/local/bin/tf/versions/${VERSION}" && \ - ln -s "/usr/local/bin/tf/versions/${VERSION}/terraform" "/usr/local/bin/terraform${VERSION}" && \ - rm "terraform_${VERSION}_linux_${TERRAFORM_ARCH}.zip" && \ - rm "terraform_${VERSION}_SHA256SUMS"; \ - done && \ - ln -s "/usr/local/bin/tf/versions/${DEFAULT_TERRAFORM_VERSION}/terraform" /usr/local/bin/terraform - +RUN ./download-release.sh \ + "terraform" \ + "${TARGETPLATFORM}" \ + "${DEFAULT_TERRAFORM_VERSION}" \ + "1.6.6 1.7.5 1.8.5 ${DEFAULT_TERRAFORM_VERSION}" \ + && ./download-release.sh \ + "tofu" \ + "${TARGETPLATFORM}" \ + "${DEFAULT_OPENTOFU_VERSION}" \ + "${DEFAULT_OPENTOFU_VERSION}" # Stage 2 - Alpine # Creating the individual distro builds using targets @@ -150,24 +147,28 @@ RUN addgroup atlantis && \ # copy atlantis binary COPY --from=builder /app/atlantis /usr/local/bin/atlantis # copy terraform binaries -COPY --from=deps /usr/local/bin/terraform* /usr/local/bin/ +COPY --from=deps /usr/local/bin/terraform/terraform* /usr/local/bin/ +COPY --from=deps /usr/local/bin/tofu/tofu* /usr/local/bin/ # copy dependencies COPY --from=deps /usr/local/bin/conftest /usr/local/bin/conftest COPY --from=deps /usr/bin/git-lfs /usr/bin/git-lfs COPY docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh +# renovate: datasource=repology depName=alpine_3_20/ca-certificates versioning=loose +ENV CA_CERTIFICATES_VERSION="20240705-r0" + # Install packages needed to run Atlantis. 
# We place this last as it will bust less docker layer caches when packages update RUN apk add --no-cache \ - ca-certificates~=20230506 \ + ca-certificates~=${CA_CERTIFICATES_VERSION} \ curl~=8 \ git~=2 \ unzip~=6 \ bash~=5 \ openssh~=9 \ dumb-init~=1 \ - gcompat~=1 - + gcompat~=1 \ + coreutils-env~=9 # Set the entry point to the atlantis user and run the atlantis command USER atlantis @@ -190,7 +191,8 @@ RUN useradd --create-home --user-group --shell /bin/bash atlantis && \ # copy atlantis binary COPY --from=builder /app/atlantis /usr/local/bin/atlantis # copy terraform binaries -COPY --from=deps /usr/local/bin/terraform* /usr/local/bin/ +COPY --from=deps /usr/local/bin/terraform/terraform* /usr/local/bin/ +COPY --from=deps /usr/local/bin/tofu/tofu* /usr/local/bin/ # copy dependencies COPY --from=deps /usr/local/bin/conftest /usr/local/bin/conftest COPY --from=deps /usr/bin/git-lfs /usr/bin/git-lfs diff --git a/Dockerfile.dev b/Dockerfile.dev index f85a5555e2..fcf3b15496 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -1,3 +1,3 @@ -FROM ghcr.io/runatlantis/atlantis:latest +FROM ghcr.io/runatlantis/atlantis:latest@sha256:5318d83dc11546c30ea2487108f465c7e000c8d190c66bf925fc3dba7a993d3f COPY atlantis /usr/local/bin/atlantis WORKDIR /atlantis/src diff --git a/GOVERNANCE.md b/GOVERNANCE.md deleted file mode 100644 index 9b4454e111..0000000000 --- a/GOVERNANCE.md +++ /dev/null @@ -1,116 +0,0 @@ -# Atlantis Governance - -This document defines the project governance for Atlantis. - -## Overview - -**Atlantis** is committed to building an open, inclusive, productive and self-governing open source -community focused on building a high-quality infrastructure orchestration system. The -community is governed by this document with the goal of defining how community -should work together to achieve this goal. - -## Code Repositories - -The following code repositories are governed by Atlantis community and -maintained under the `runatlantis` organization. - -* **[atlantis](https://github.com/runatlantis/atlantis):** Main Atlantis codebase. -* **[atlantis-helm-charts](https://github.com/runatlantis/helm-charts):** Helm chart for easy deployment of Atlantis. -* **[atlantis-tests](https://github.com/runatlantis/atlantis-tests):** A set of terraform projects that atlantis e2e tests run on. -* **[atlantis-example](https://github.com/runatlantis/atlantis-example):** A simple terraform project to use along with atlantis bootstrap mode. - -## Community Roles - -* **Users:** Members that engage with the Atlantis community via any medium (Slack, GitHub, mailing lists, etc.). -* **Contributors:** Regular contributions to projects (documentation, code reviews, responding to issues, participation in proposal discussions, contributing code, etc.). -* **Core Contributors:** Contributors who drive certain subprojects within Atlantis. They are responsible for the direction and work done within that subproject, providing enhancements and support for the Atlantis project as a whole. Core Contributors are expected to contribute code and documentation, review PRs including ensuring quality of code, triage issues, proactively fix bugs, and perform maintenance tasks for the subprojects they are responsible for. -* **Maintainers:** The Atlantis project leaders. They are responsible for the overall health and direction of the project; final reviewers of PRs and responsible for releases. Some Maintainers are responsible for one or more components within a project, acting as technical leads for that component. 
Maintainers are expected to contribute code and documentation, review PRs including ensuring quality of code, triage issues, proactively fix bugs, and perform maintenance tasks for these components. - -### Maintainers - -New maintainers and subproject maintainers must be nominated by an existing maintainer and must be elected by a supermajority of existing maintainers. Likewise, maintainers can be removed by a supermajority of the existing maintainers or can resign by notifying one of the maintainers. - -### Supermajority - -A supermajority is defined as two-thirds of members in the group. -A supermajority of [Maintainers](#maintainers) is required for certain -decisions as outlined above. Voting on decisions can happen on the mailing list, GitHub, Slack, email, or via a voting service, when appropriate. Maintainers can either vote "agree, yes, +1", "disagree, no, -1", or "abstain". A vote passes when supermajority is met. An abstain vote equals not voting at all. - -### Decision Making - -Ideally, all project decisions are resolved by consensus. If impossible, any -maintainer may call a vote. Unless otherwise specified in this document, any -vote will be decided by a supermajority of maintainers. - -Votes by maintainers belonging to the same company -will count as one vote; e.g., 4 maintainers employed by fictional company **Fictiousum** will -only have **one** combined vote. If voting members from a given company do not -agree, the company's vote is determined by a supermajority of voters from that -company. If no supermajority is achieved, the company is considered to have -abstained. - -## Proposal Process - -One of the most important aspects in any open source community is the concept -of proposals. Large changes to the codebase and/or new features should be -preceded by a proposal as an ADR or GH issue in the main Atlantis repo. This process allows for all -members of the community to weigh in on the concept (including the technical -details), share their comments and ideas, and offer to help. It also ensures -that members are not duplicating work or inadvertently stepping on toes by -making large conflicting changes. - -The project roadmap is defined by accepted proposals. - -Proposals should cover the high-level objectives, use cases, and technical -recommendations on how to implement. In general, the community member(s) -interested in implementing the proposal should be either deeply engaged in the -proposal process or be an author of the proposal. - -The proposal should be documented as a separated markdown file pushed to the root of the -`docs/adr` folder in the [atlantis](https://github.com/runatlantis/atlantis) -repository via PR. The name of the file should follow the name pattern set by the ADR process `<####-short -meaningful words joined by '-'>.md`, e.g: -`0002-adr-proposal.md`. - -Use the [ADR Tools](https://github.com/npryce/adr-tools) and run `adr new ` - -### Proposal Lifecycle - -The proposal PR can be marked with different status labels to represent the -status of the proposal: - -* **New**: Proposal is just created. -* **Reviewing**: Proposal is under review and discussion. -* **Accepted**: Proposal is reviewed and accepted (either by consensus or vote). -* **Rejected**: Proposal is reviewed and rejected (either by consensus or vote). - -## Lazy Consensus - -To maintain velocity in a project as busy as Atlantis, the concept of [Lazy -Consensus](http://en.osswiki.info/concepts/lazy_consensus) is practiced. 
Ideas -and/or proposals should be shared by maintainers via -GitHub with the appropriate maintainer groups (e.g., -`@atlantis/all-maintainers`) tagged. Out of respect for other contributors, -major changes should also be accompanied by a ping on Slack in the -[#contributors](https://atlantis-community.slack.com/archives/C04ES70Q6E8) channel or a note on the -Atlantis google mailing list as appropriate. Author(s) of proposal, Pull Requests, -issues, etc. will give a time period of no less than five (5) working days for -comment and remain cognizant of popular observed world holidays. - -Other maintainers may chime in and request additional time for review, but -should remain cognizant of blocking progress and abstain from delaying -progress unless absolutely needed. The expectation is that blocking progress -is accompanied by a guarantee to review and respond to the relevant action(s) -(proposals, PRs, issues, etc.) in short order. - -Lazy Consensus is practiced for all projects in the `runatlantis` org, including -the main project repository, community-driven sub-projects, and the community -repo that includes proposals and governing documents. - -Lazy consensus does _not_ apply to the process of: - -* Removal of maintainers from Atlantis - -## Updating Governance - -All substantive changes in Governance require a supermajority agreement by all maintainers. diff --git a/MAINTAINERS.md b/MAINTAINERS.md index 688de90946..d590cf3e02 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -1,8 +1,8 @@ The current Maintainers Group for the [Atlantis] Project consists of: -| Name | GitHub ID | Employer | Responsibilities | -| ------------- | --------- | -------- | ---------------- | -| Dylan Page | [GenPage](https://github.com/GenPage) | Autodesk | Maintainer | -| PePe Amengual | [jamengual](https://github.com/jamengual) | Slalom | Maintainer | -| Rui Chen | [chenrui333](https://github.com/chenrui333) | Meetup | Maintainer | -| Ronak | [nitrocode](https://github.com/nitrocode) | RB Consulting LLC | Core Contributor | \ No newline at end of file +| Name | GitHub ID | Employer | Responsibilities | +| ------------- | ------------------------------------------- | ----------------- | ---------------- | +| Dylan Page | [GenPage](https://github.com/GenPage) | Lambda | Maintainer | +| PePe Amengual | [jamengual](https://github.com/jamengual) | Slalom | Maintainer | +| Rui Chen | [chenrui333](https://github.com/chenrui333) | Meetup | Maintainer | +| Ronak | [nitrocode](https://github.com/nitrocode) | RB Consulting LLC | Core Contributor | diff --git a/Makefile b/Makefile index 1678ef588e..620e01bf95 100644 --- a/Makefile +++ b/Makefile @@ -6,6 +6,9 @@ IMAGE_NAME := runatlantis/atlantis .DEFAULT_GOAL := help +# renovate: datasource=github-releases depName=golangci/golangci-lint +GOLANGCI_LINT_VERSION := v1.59.1 + .PHONY: help help: ## List targets & descriptions @cat Makefile* | grep -E '^[a-zA-Z\/_-]+:.*?## .*$$' | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' @@ -93,12 +96,12 @@ lint: ## Run linter locally .PHONY: check-lint check-lint: ## Run linter in CI/CD. If running locally use 'lint' - curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b ./bin v1.49.0 + curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b ./bin $(GOLANGCI_LINT_VERSION) ./bin/golangci-lint run -j 4 --timeout 5m .PHONY: check-fmt check-fmt: ## Fail if not formatted - if [[ $$(goimports -l $$(find . 
-type f -name '*.go' ! -path "./vendor/*" ! -path "**/mocks/*")) ]]; then exit 1; fi + ./scripts/fmt.sh .PHONY: end-to-end-deps end-to-end-deps: ## Install e2e dependencies @@ -110,4 +113,4 @@ end-to-end-tests: ## Run e2e tests .PHONY: website-dev website-dev: ## Run runatlantis.io on localhost:8080 - pnpm website:dev + npm run website:dev diff --git a/README.md b/README.md index 6471847770..33c8b87bfa 100644 --- a/README.md +++ b/README.md @@ -1,15 +1,15 @@ # Atlantis <!-- omit in toc --> [![Latest Release](https://img.shields.io/github/release/runatlantis/atlantis.svg)](https://github.com/runatlantis/atlantis/releases/latest) -[![SuperDopeBadge](./runatlantis.io/.vuepress/public/hightower-super-dope.svg)](https://twitter.com/kelseyhightower/status/893260922222813184) +[![SuperDopeBadge](./runatlantis.io/public/hightower-super-dope.svg)](https://twitter.com/kelseyhightower/status/893260922222813184) [![Go Report Card](https://goreportcard.com/badge/github.com/runatlantis/atlantis)](https://goreportcard.com/report/github.com/runatlantis/atlantis) [![Go Reference](https://pkg.go.dev/badge/github.com/runatlantis/atlantis.svg)](https://pkg.go.dev/github.com/runatlantis/atlantis) -[![codecov](https://codecov.io/gh/runatlantis/atlantis/branch/main/graph/badge.svg)](https://codecov.io/gh/runatlantis/atlantis) -[![CircleCI](https://circleci.com/gh/runatlantis/atlantis/tree/main.svg?style=shield)](https://circleci.com/gh/runatlantis/atlantis/tree/main) -[![Slack](https://img.shields.io/badge/Join-Atlantis%20Community%20Slack-red)](https://join.slack.com/t/atlantis-community/shared_invite/zt-1nt7yx7uq-AnVRc_JItF1CDwZtfqv_OA) +[![Slack](https://img.shields.io/badge/Join-Atlantis%20Community%20Slack-red)](https://communityinviter.com/apps/cloud-native/cncf) +[![OpenSSF Scorecard](https://api.scorecard.dev/projects/github.com/runatlantis/atlantis/badge)](https://scorecard.dev/viewer/?uri=github.com/runatlantis/atlantis) +[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/9428/badge)](https://www.bestpractices.dev/projects/9428) <p align="center"> - <img src="./runatlantis.io/.vuepress/public/hero.png" alt="Atlantis Logo"/><br><br> + <img src="./runatlantis.io/public/hero.png" alt="Atlantis Logo"/><br><br> <b>Terraform Pull Request Automation</b> </p> @@ -23,7 +23,7 @@ * How to get started: [www.runatlantis.io/guide](https://www.runatlantis.io/guide) * Full documentation: [www.runatlantis.io/docs](https://www.runatlantis.io/docs) * Download the latest release: [github.com/runatlantis/atlantis/releases/latest](https://github.com/runatlantis/atlantis/releases/latest) -* Get help in our [Slack channel](https://join.slack.com/t/atlantis-community/shared_invite/zt-1nt7yx7uq-AnVRc_JItF1CDwZtfqv_OA) +* Get help in our [Slack channel](https://communityinviter.com/apps/cloud-native/cncf): #atlantis-community for support, #atlantis-contributors for development * Start Contributing: [CONTRIBUTING.md](CONTRIBUTING.md) ## What is Atlantis? diff --git a/SECURITY.md b/SECURITY.md index 7c6f94f07e..0f989d42de 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -3,3 +3,7 @@ ## Reporting a Vulnerability We take security issues seriously. Please report a security vulnerability to the maintainers using [private vulnerability reporting](https://github.com/runatlantis/atlantis/security/advisories/new). + +## Maintained releases + +Only the two latest minor releases are maintained. For example, if `0.29.7` is the latest, then `0.29.x` and `0.28.x` will receive security fixes. 
diff --git a/cmd/server.go b/cmd/server.go index 73dc7b71c6..116ecf4fdd 100644 --- a/cmd/server.go +++ b/cmd/server.go @@ -37,6 +37,12 @@ const ( CheckoutStrategyMerge = "merge" ) +// TF distributions +const ( + TFDistributionTerraform = "terraform" + TFDistributionOpenTofu = "opentofu" +) + // To add a new flag you must: // 1. Add a const with the flag name (in alphabetic order). // 2. Add a new field to server.UserConfig and set the mapstructure tag equal to the flag name. @@ -76,23 +82,30 @@ const ( DisableUnlockLabelFlag = "disable-unlock-label" DiscardApprovalOnPlanFlag = "discard-approval-on-plan" EmojiReaction = "emoji-reaction" + EnableDiffMarkdownFormat = "enable-diff-markdown-format" EnablePolicyChecksFlag = "enable-policy-checks" EnableRegExpCmdFlag = "enable-regexp-cmd" - EnableDiffMarkdownFormat = "enable-diff-markdown-format" ExecutableName = "executable-name" FailOnPreWorkflowHookError = "fail-on-pre-workflow-hook-error" HideUnchangedPlanComments = "hide-unchanged-plan-comments" GHHostnameFlag = "gh-hostname" GHTeamAllowlistFlag = "gh-team-allowlist" GHTokenFlag = "gh-token" + GHTokenFileFlag = "gh-token-file" // nolint: gosec GHUserFlag = "gh-user" GHAppIDFlag = "gh-app-id" GHAppKeyFlag = "gh-app-key" GHAppKeyFileFlag = "gh-app-key-file" GHAppSlugFlag = "gh-app-slug" + GHAppInstallationIDFlag = "gh-app-installation-id" GHOrganizationFlag = "gh-org" GHWebhookSecretFlag = "gh-webhook-secret" // nolint: gosec GHAllowMergeableBypassApply = "gh-allow-mergeable-bypass-apply" // nolint: gosec + GiteaBaseURLFlag = "gitea-base-url" + GiteaTokenFlag = "gitea-token" + GiteaUserFlag = "gitea-user" + GiteaWebhookSecretFlag = "gitea-webhook-secret" // nolint: gosec + GiteaPageSizeFlag = "gitea-page-size" GitlabHostnameFlag = "gitlab-hostname" GitlabTokenFlag = "gitlab-token" GitlabUserFlag = "gitlab-user" @@ -104,6 +117,7 @@ const ( LockingDBType = "locking-db-type" LogLevelFlag = "log-level" MarkdownTemplateOverridesDirFlag = "markdown-template-overrides-dir" + MaxCommentsPerCommand = "max-comments-per-command" ParallelPoolSize = "parallel-pool-size" StatsNamespace = "stats-namespace" AllowDraftPRs = "allow-draft-prs" @@ -127,11 +141,13 @@ const ( SSLCertFileFlag = "ssl-cert-file" SSLKeyFileFlag = "ssl-key-file" RestrictFileList = "restrict-file-list" + TFDistributionFlag = "tf-distribution" TFDownloadFlag = "tf-download" TFDownloadURLFlag = "tf-download-url" UseTFPluginCache = "use-tf-plugin-cache" VarFileAllowlistFlag = "var-file-allowlist" VCSStatusName = "vcs-status-name" + IgnoreVCSStatusNames = "ignore-vcs-status-names" TFEHostnameFlag = "tfe-hostname" TFELocalExecutionModeFlag = "tfe-local-execution-mode" TFETokenFlag = "tfe-token" @@ -152,13 +168,17 @@ const ( DefaultCheckoutDepth = 0 DefaultBitbucketBaseURL = bitbucketcloud.BaseURL DefaultDataDir = "~/.atlantis" - DefaultEmojiReaction = "eyes" + DefaultEmojiReaction = "" DefaultExecutableName = "atlantis" DefaultMarkdownTemplateOverridesDir = "~/.markdown_templates" DefaultGHHostname = "github.com" + DefaultGiteaBaseURL = "https://gitea.com" + DefaultGiteaPageSize = 30 DefaultGitlabHostname = "gitlab.com" DefaultLockingDBType = "boltdb" DefaultLogLevel = "info" + DefaultIgnoreVCSStatusNames = "" + DefaultMaxCommentsPerCommand = 100 DefaultParallelPoolSize = 15 DefaultStatsNamespace = "atlantis" DefaultPort = 4141 @@ -166,6 +186,7 @@ const ( DefaultRedisPort = 6379 DefaultRedisTLSEnabled = false DefaultRedisInsecureSkipVerify = false + DefaultTFDistribution = TFDistributionTerraform DefaultTFDownloadURL = 
"https://releases.hashicorp.com" DefaultTFDownload = true DefaultTFEHostname = "app.terraform.io" @@ -268,7 +289,7 @@ var stringFlags = map[string]stringFlag{ defaultValue: "", }, EmojiReaction: { - description: "Emoji Reaction to use to react to comments", + description: "Emoji Reaction to use to react to comments.", defaultValue: DefaultEmojiReaction, }, ExecutableName: { @@ -297,6 +318,9 @@ var stringFlags = map[string]stringFlag{ GHTokenFlag: { description: "GitHub token of API user. Can also be specified via the ATLANTIS_GH_TOKEN environment variable.", }, + GHTokenFileFlag: { + description: "A path to a file containing the GitHub token of API user. Can also be specified via the ATLANTIS_GH_TOKEN_FILE environment variable.", + }, GHAppKeyFlag: { description: "The GitHub App's private key", defaultValue: "", @@ -318,6 +342,22 @@ var stringFlags = map[string]stringFlag{ "This means that an attacker could spoof calls to Atlantis and cause it to perform malicious actions. " + "Should be specified via the ATLANTIS_GH_WEBHOOK_SECRET environment variable.", }, + GiteaBaseURLFlag: { + description: "Base URL of Gitea server installation. Must include 'http://' or 'https://'.", + }, + GiteaUserFlag: { + description: "Gitea username of API user.", + defaultValue: "", + }, + GiteaTokenFlag: { + description: "Gitea token of API user. Can also be specified via the ATLANTIS_GITEA_TOKEN environment variable.", + }, + GiteaWebhookSecretFlag: { + description: "Optional secret used to validate Gitea webhooks." + + " SECURITY WARNING: If not specified, Atlantis won't be able to validate that the incoming webhook call came from Gitea. " + + "This means that an attacker could spoof calls to Atlantis and cause it to perform malicious actions. " + + "Should be specified via the ATLANTIS_GITEA_WEBHOOK_SECRET environment variable.", + }, GitlabHostnameFlag: { description: "Hostname of your GitLab Enterprise installation. If using gitlab.com, no need to set.", defaultValue: DefaultGitlabHostname, @@ -380,6 +420,10 @@ var stringFlags = map[string]stringFlag{ SSLKeyFileFlag: { description: fmt.Sprintf("File containing x509 private key matching --%s.", SSLCertFileFlag), }, + TFDistributionFlag: { + description: fmt.Sprintf("Which TF distribution to use. Can be set to %s or %s.", TFDistributionTerraform, TFDistributionOpenTofu), + defaultValue: DefaultTFDistribution, + }, TFDownloadURLFlag: { description: "Base URL to download Terraform versions from.", defaultValue: DefaultTFDownloadURL, @@ -401,6 +445,12 @@ var stringFlags = map[string]stringFlag{ description: "Comma-separated list of additional paths where variable definition files can be read from." + " If this argument is not provided, it defaults to Atlantis' data directory, determined by the --data-dir argument.", }, + IgnoreVCSStatusNames: { + description: "Comma separated list of VCS status names from other atlantis services." + + " When `gh-allow-mergeable-bypass-apply` is true, will ignore status checks (e.g. `status1/plan`, `status1/apply`, `status2/plan`, `status2/apply`) from other Atlantis services when checking if the PR is mergeable." 
+ + " Currently only implemented for GitHub.", + defaultValue: DefaultIgnoreVCSStatusNames, + }, VCSStatusName: { description: "Name used to identify Atlantis for pull request statuses.", defaultValue: DefaultVCSStatusName, @@ -436,6 +486,7 @@ var boolFlags = map[string]boolFlag{ description: "Disable atlantis auto planning feature", defaultValue: false, }, + DisableRepoLockingFlag: { description: "Disable atlantis locking repos", }, @@ -568,6 +619,14 @@ var intFlags = map[string]intFlag{ " If merge base is further behind than this number of commits from any of branches heads, full fetch will be performed.", defaultValue: DefaultCheckoutDepth, }, + MaxCommentsPerCommand: { + description: "If non-zero, the maximum number of comments to split command output into before truncating.", + defaultValue: DefaultMaxCommentsPerCommand, + }, + GiteaPageSizeFlag: { + description: "Optional value that specifies the number of results per page to expect from Gitea.", + defaultValue: DefaultGiteaPageSize, + }, ParallelPoolSize: { description: "Max size of the wait group that runs parallel plans and applies (if enabled).", defaultValue: DefaultParallelPoolSize, @@ -591,6 +650,13 @@ var int64Flags = map[string]int64Flag{ description: "GitHub App Id. If defined, initializes the GitHub client with app-based credentials", defaultValue: 0, }, + GHAppInstallationIDFlag: { + description: "GitHub App Installation Id. If defined, initializes the GitHub client with app-based credentials " + + "using this specific GitHub Application Installation ID, otherwise it attempts to auto-detect it. " + + "Note that this value must be set if you want to have one App and multiple installations of that same " + + "application.", + defaultValue: 0, + }, } // ValidLogLevels are the valid log levels that can be set @@ -747,7 +813,7 @@ func (s *ServerCmd) run() error { if err := s.Viper.Unmarshal(&userConfig); err != nil { return err } - s.setDefaults(&userConfig) + s.setDefaults(&userConfig, s.Viper) // Now that we've parsed the config we can set our local logger to the // right level. 
@@ -788,7 +854,7 @@ func (s *ServerCmd) run() error { return server.Start() } -func (s *ServerCmd) setDefaults(c *server.UserConfig) { +func (s *ServerCmd) setDefaults(c *server.UserConfig, v *viper.Viper) { if c.AzureDevOpsHostname == "" { c.AzureDevOpsHostname = DefaultADHostname } @@ -813,6 +879,12 @@ func (s *ServerCmd) setDefaults(c *server.UserConfig) { if c.GitlabHostname == "" { c.GitlabHostname = DefaultGitlabHostname } + if c.GiteaBaseURL == "" { + c.GiteaBaseURL = DefaultGiteaBaseURL + } + if c.GiteaPageSize == 0 { + c.GiteaPageSize = DefaultGiteaPageSize + } if c.BitbucketBaseURL == "" { c.BitbucketBaseURL = DefaultBitbucketBaseURL } @@ -831,6 +903,9 @@ func (s *ServerCmd) setDefaults(c *server.UserConfig) { if c.MarkdownTemplateOverridesDir == "" { c.MarkdownTemplateOverridesDir = DefaultMarkdownTemplateOverridesDir } + if !v.IsSet("max-comments-per-command") { + c.MaxCommentsPerCommand = DefaultMaxCommentsPerCommand + } if c.ParallelPoolSize == 0 { c.ParallelPoolSize = DefaultParallelPoolSize } @@ -846,12 +921,18 @@ func (s *ServerCmd) setDefaults(c *server.UserConfig) { if c.RedisPort == 0 { c.RedisPort = DefaultRedisPort } + if c.TFDistribution == "" { + c.TFDistribution = DefaultTFDistribution + } if c.TFDownloadURL == "" { c.TFDownloadURL = DefaultTFDownloadURL } if c.VCSStatusName == "" { c.VCSStatusName = DefaultVCSStatusName } + if c.IgnoreVCSStatusNames == "" { + c.IgnoreVCSStatusNames = DefaultIgnoreVCSStatusNames + } if c.TFEHostname == "" { c.TFEHostname = DefaultTFEHostname } @@ -872,6 +953,11 @@ func (s *ServerCmd) validate(userConfig server.UserConfig) error { return fmt.Errorf("invalid log level: must be one of %v", ValidLogLevels) } + if userConfig.TFDistribution != TFDistributionTerraform && userConfig.TFDistribution != TFDistributionOpenTofu { + return fmt.Errorf("invalid tf distribution: expected one of %s or %s", + TFDistributionTerraform, TFDistributionOpenTofu) + } + checkoutStrategy := userConfig.CheckoutStrategy if checkoutStrategy != CheckoutStrategyBranch && checkoutStrategy != CheckoutStrategyMerge { return fmt.Errorf("invalid checkout strategy: not one of %s or %s", @@ -883,25 +969,33 @@ func (s *ServerCmd) validate(userConfig server.UserConfig) error { } // The following combinations are valid. - // 1. github user and token set + // 1. github user and (token or token file) // 2. github app ID and (key file set or key set) - // 3. gitlab user and token set - // 4. bitbucket user and token set - // 5. azuredevops user and token set - // 6. any combination of the above - vcsErr := fmt.Errorf("--%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s must be set", GHUserFlag, GHTokenFlag, GHAppIDFlag, GHAppKeyFileFlag, GHAppIDFlag, GHAppKeyFlag, GitlabUserFlag, GitlabTokenFlag, BitbucketUserFlag, BitbucketTokenFlag, ADUserFlag, ADTokenFlag) - if ((userConfig.GithubUser == "") != (userConfig.GithubToken == "")) || ((userConfig.GitlabUser == "") != (userConfig.GitlabToken == "")) || ((userConfig.BitbucketUser == "") != (userConfig.BitbucketToken == "")) || ((userConfig.AzureDevopsUser == "") != (userConfig.AzureDevopsToken == "")) { + // 3. gitea user and token set + // 4. gitlab user and token set + // 5. bitbucket user and token set + // 6. azuredevops user and token set + // 7. 
any combination of the above + vcsErr := fmt.Errorf("--%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s must be set", GHUserFlag, GHTokenFlag, GHUserFlag, GHTokenFileFlag, GHAppIDFlag, GHAppKeyFileFlag, GHAppIDFlag, GHAppKeyFlag, GiteaUserFlag, GiteaTokenFlag, GitlabUserFlag, GitlabTokenFlag, BitbucketUserFlag, BitbucketTokenFlag, ADUserFlag, ADTokenFlag) + if ((userConfig.GiteaUser == "") != (userConfig.GiteaToken == "")) || + ((userConfig.GitlabUser == "") != (userConfig.GitlabToken == "")) || + ((userConfig.BitbucketUser == "") != (userConfig.BitbucketToken == "")) || + ((userConfig.AzureDevopsUser == "") != (userConfig.AzureDevopsToken == "")) { return vcsErr } - if (userConfig.GithubAppID != 0) && ((userConfig.GithubAppKey == "") && (userConfig.GithubAppKeyFile == "")) { - return vcsErr + if userConfig.GithubUser != "" { + if (userConfig.GithubToken == "") == (userConfig.GithubTokenFile == "") { + return vcsErr + } } - if (userConfig.GithubAppID == 0) && ((userConfig.GithubAppKey != "") || (userConfig.GithubAppKeyFile != "")) { - return vcsErr + if userConfig.GithubAppID != 0 { + if (userConfig.GithubAppKey == "") == (userConfig.GithubAppKeyFile == "") { + return vcsErr + } } // At this point, we know that there can't be a single user/token without // its partner, but we haven't checked if any user/token is set at all. - if userConfig.GithubAppID == 0 && userConfig.GithubUser == "" && userConfig.GitlabUser == "" && userConfig.BitbucketUser == "" && userConfig.AzureDevopsUser == "" { + if userConfig.GithubAppID == 0 && userConfig.GithubUser == "" && userConfig.GiteaUser == "" && userConfig.GitlabUser == "" && userConfig.BitbucketUser == "" && userConfig.AzureDevopsUser == "" { return vcsErr } @@ -920,6 +1014,14 @@ func (s *ServerCmd) validate(userConfig server.UserConfig) error { return fmt.Errorf("--%s must have http:// or https://, got %q", BitbucketBaseURLFlag, userConfig.BitbucketBaseURL) } + parsed, err = url.Parse(userConfig.GiteaBaseURL) + if err != nil { + return fmt.Errorf("error parsing --%s flag value %q: %s", GiteaWebhookSecretFlag, userConfig.GiteaBaseURL, err) + } + if parsed.Scheme != "http" && parsed.Scheme != "https" { + return fmt.Errorf("--%s must have http:// or https://, got %q", GiteaBaseURLFlag, userConfig.GiteaBaseURL) + } + if userConfig.RepoConfig != "" && userConfig.RepoConfigJSON != "" { return fmt.Errorf("cannot use --%s and --%s at the same time", RepoConfigFlag, RepoConfigJSONFlag) } @@ -927,11 +1029,14 @@ func (s *ServerCmd) validate(userConfig server.UserConfig) error { // Warn if any tokens have newlines. 
for name, token := range map[string]string{ GHTokenFlag: userConfig.GithubToken, + GHTokenFileFlag: userConfig.GithubTokenFile, GHWebhookSecretFlag: userConfig.GithubWebhookSecret, GitlabTokenFlag: userConfig.GitlabToken, GitlabWebhookSecretFlag: userConfig.GitlabWebhookSecret, BitbucketTokenFlag: userConfig.BitbucketToken, BitbucketWebhookSecretFlag: userConfig.BitbucketWebhookSecret, + GiteaTokenFlag: userConfig.GiteaToken, + GiteaWebhookSecretFlag: userConfig.GiteaWebhookSecret, } { if strings.Contains(token, "\n") { s.Logger.Warn("--%s contains a newline which is usually unintentional", name) @@ -1025,6 +1130,7 @@ func (s *ServerCmd) setVarFileAllowlist(userConfig *server.UserConfig) { // trimAtSymbolFromUsers trims @ from the front of the github and gitlab usernames func (s *ServerCmd) trimAtSymbolFromUsers(userConfig *server.UserConfig) { userConfig.GithubUser = strings.TrimPrefix(userConfig.GithubUser, "@") + userConfig.GiteaUser = strings.TrimPrefix(userConfig.GiteaUser, "@") userConfig.GitlabUser = strings.TrimPrefix(userConfig.GitlabUser, "@") userConfig.BitbucketUser = strings.TrimPrefix(userConfig.BitbucketUser, "@") userConfig.AzureDevopsUser = strings.TrimPrefix(userConfig.AzureDevopsUser, "@") @@ -1034,6 +1140,9 @@ func (s *ServerCmd) securityWarnings(userConfig *server.UserConfig) { if userConfig.GithubUser != "" && userConfig.GithubWebhookSecret == "" && !s.SilenceOutput { s.Logger.Warn("no GitHub webhook secret set. This could allow attackers to spoof requests from GitHub") } + if userConfig.GiteaUser != "" && userConfig.GiteaWebhookSecret == "" && !s.SilenceOutput { + s.Logger.Warn("no Gitea webhook secret set. This could allow attackers to spoof requests from Gitea") + } if userConfig.GitlabUser != "" && userConfig.GitlabWebhookSecret == "" && !s.SilenceOutput { s.Logger.Warn("no GitLab webhook secret set. 
This could allow attackers to spoof requests from GitLab") } diff --git a/cmd/server_test.go b/cmd/server_test.go index f7f53fa003..2c96a4f1f8 100644 --- a/cmd/server_test.go +++ b/cmd/server_test.go @@ -86,13 +86,20 @@ var testFlags = map[string]interface{}{ GHHostnameFlag: "ghhostname", GHTeamAllowlistFlag: "", GHTokenFlag: "token", + GHTokenFileFlag: "", GHUserFlag: "user", GHAppIDFlag: int64(0), GHAppKeyFlag: "", GHAppKeyFileFlag: "", GHAppSlugFlag: "atlantis", + GHAppInstallationIDFlag: int64(0), GHOrganizationFlag: "", GHWebhookSecretFlag: "secret", + GiteaBaseURLFlag: "http://localhost", + GiteaTokenFlag: "gitea-token", + GiteaUserFlag: "gitea-user", + GiteaWebhookSecretFlag: "gitea-secret", + GiteaPageSizeFlag: 30, GitlabHostnameFlag: "gitlab-hostname", GitlabTokenFlag: "gitlab-token", GitlabUserFlag: "gitlab-user", @@ -103,6 +110,7 @@ var testFlags = map[string]interface{}{ LockingDBType: "boltdb", LogLevelFlag: "debug", MarkdownTemplateOverridesDirFlag: "/path2", + MaxCommentsPerCommand: 10, StatsNamespace: "atlantis", AllowDraftPRs: true, PortFlag: 8181, @@ -129,6 +137,7 @@ var testFlags = map[string]interface{}{ SSLCertFileFlag: "cert-file", SSLKeyFileFlag: "key-file", RestrictFileList: false, + TFDistributionFlag: "terraform", TFDownloadFlag: true, TFDownloadURLFlag: "https://my-hostname.com", TFEHostnameFlag: "my-hostname", @@ -137,6 +146,7 @@ var testFlags = map[string]interface{}{ UseTFPluginCache: true, VarFileAllowlistFlag: "/path", VCSStatusName: "my-status", + IgnoreVCSStatusNames: "", WebBasicAuthFlag: false, WebPasswordFlag: "atlantis", WebUsernameFlag: "atlantis", @@ -156,6 +166,7 @@ func TestExecute_Defaults(t *testing.T) { c := setup(map[string]interface{}{ GHUserFlag: "user", GHTokenFlag: "token", + GiteaBaseURLFlag: "http://localhost", RepoAllowlistFlag: "*", }, t) err := c.Execute() @@ -174,6 +185,7 @@ func TestExecute_Defaults(t *testing.T) { strExceptions := map[string]string{ GHUserFlag: "user", GHTokenFlag: "token", + GiteaBaseURLFlag: "http://localhost", DataDirFlag: dataDir, MarkdownTemplateOverridesDirFlag: markdownTemplateOverridesDir, AtlantisURLFlag: "http://" + hostname + ":4141", @@ -422,7 +434,7 @@ func TestExecute_ValidateSSLConfig(t *testing.T) { } func TestExecute_ValidateVCSConfig(t *testing.T) { - expErr := "--gh-user/--gh-token or --gh-app-id/--gh-app-key-file or --gh-app-id/--gh-app-key or --gitlab-user/--gitlab-token or --bitbucket-user/--bitbucket-token or --azuredevops-user/--azuredevops-token must be set" + expErr := "--gh-user/--gh-token or --gh-user/--gh-token-file or --gh-app-id/--gh-app-key-file or --gh-app-id/--gh-app-key or --gitea-user/--gitea-token or --gitlab-user/--gitlab-token or --bitbucket-user/--bitbucket-token or --azuredevops-user/--azuredevops-token must be set" cases := []struct { description string flags map[string]interface{} @@ -440,6 +452,13 @@ func TestExecute_ValidateVCSConfig(t *testing.T) { }, true, }, + { + "just gitea token set", + map[string]interface{}{ + GiteaTokenFlag: "token", + }, + true, + }, { "just gitlab token set", map[string]interface{}{ @@ -468,6 +487,13 @@ func TestExecute_ValidateVCSConfig(t *testing.T) { }, true, }, + { + "just gitea user set", + map[string]interface{}{ + GiteaUserFlag: "user", + }, + true, + }, { "just github app set", map[string]interface{}{ @@ -534,6 +560,22 @@ func TestExecute_ValidateVCSConfig(t *testing.T) { }, true, }, + { + "github user and gitea token set", + map[string]interface{}{ + GHUserFlag: "user", + GiteaTokenFlag: "token", + }, + true, + }, + { + "gitea user and 
github token set", + map[string]interface{}{ + GiteaUserFlag: "user", + GHTokenFlag: "token", + }, + true, + }, { "github user and github token set and should be successful", map[string]interface{}{ @@ -542,6 +584,31 @@ func TestExecute_ValidateVCSConfig(t *testing.T) { }, false, }, + { + "github user and github token file and should be successful", + map[string]interface{}{ + GHUserFlag: "user", + GHTokenFileFlag: "/path/to/token", + }, + false, + }, + { + "github user, github token, and github token file and should fail", + map[string]interface{}{ + GHUserFlag: "user", + GHTokenFlag: "token", + GHTokenFileFlag: "/path/to/token", + }, + true, + }, + { + "gitea user and gitea token set and should be successful", + map[string]interface{}{ + GiteaUserFlag: "user", + GiteaTokenFlag: "token", + }, + false, + }, { "github app and key file set and should be successful", map[string]interface{}{ @@ -587,6 +654,8 @@ func TestExecute_ValidateVCSConfig(t *testing.T) { map[string]interface{}{ GHUserFlag: "user", GHTokenFlag: "token", + GiteaUserFlag: "user", + GiteaTokenFlag: "token", GitlabUserFlag: "user", GitlabTokenFlag: "token", BitbucketUserFlag: "user", @@ -699,6 +768,34 @@ func TestExecute_GithubApp(t *testing.T) { Equals(t, int64(1), passedConfig.GithubAppID) } +func TestExecute_GithubAppWithInstallationID(t *testing.T) { + t.Log("Should pass the installation ID to the config.") + c := setup(map[string]interface{}{ + GHAppKeyFlag: testdata.GithubPrivateKey, + GHAppIDFlag: "1", + GHAppInstallationIDFlag: "2", + RepoAllowlistFlag: "*", + }, t) + err := c.Execute() + Ok(t, err) + + Equals(t, int64(1), passedConfig.GithubAppID) + Equals(t, int64(2), passedConfig.GithubAppInstallationID) +} + +func TestExecute_GiteaUser(t *testing.T) { + t.Log("Should remove the @ from the gitea username if it's passed.") + c := setup(map[string]interface{}{ + GiteaUserFlag: "@user", + GiteaTokenFlag: "token", + RepoAllowlistFlag: "*", + }, t) + err := c.Execute() + Ok(t, err) + + Equals(t, "user", passedConfig.GiteaUser) +} + func TestExecute_GitlabUser(t *testing.T) { t.Log("Should remove the @ from the gitlab username if it's passed.") c := setup(map[string]interface{}{ @@ -922,3 +1019,45 @@ func configVal(t *testing.T, u server.UserConfig, tag string) interface{} { t.Fatalf("no field with tag %q found", tag) return nil } + +// Gitea base URL must have a scheme. +func TestExecute_GiteaBaseURLScheme(t *testing.T) { + c := setup(map[string]interface{}{ + GiteaUserFlag: "user", + GiteaTokenFlag: "token", + RepoAllowlistFlag: "*", + GiteaBaseURLFlag: "mydomain.com", + }, t) + ErrEquals(t, "--gitea-base-url must have http:// or https://, got \"mydomain.com\"", c.Execute()) + + c = setup(map[string]interface{}{ + GiteaUserFlag: "user", + GiteaTokenFlag: "token", + RepoAllowlistFlag: "*", + GiteaBaseURLFlag: "://mydomain.com", + }, t) + ErrEquals(t, "error parsing --gitea-webhook-secret flag value \"://mydomain.com\": parse \"://mydomain.com\": missing protocol scheme", c.Execute()) +} + +func TestExecute_GiteaWithWebhookSecret(t *testing.T) { + c := setup(map[string]interface{}{ + GiteaUserFlag: "user", + GiteaTokenFlag: "token", + RepoAllowlistFlag: "*", + GiteaWebhookSecretFlag: "my secret", + }, t) + err := c.Execute() + Ok(t, err) +} + +// Port should be retained on base url. 
+func TestExecute_GiteaBaseURLPort(t *testing.T) { + c := setup(map[string]interface{}{ + GiteaUserFlag: "user", + GiteaTokenFlag: "token", + RepoAllowlistFlag: "*", + GiteaBaseURLFlag: "http://mydomain.com:7990", + }, t) + Ok(t, c.Execute()) + Equals(t, "http://mydomain.com:7990", passedConfig.GiteaBaseURL) +} diff --git a/docker-compose.yml b/docker-compose.yml index ab2b2f1cab..2d82901687 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,42 +1,41 @@ # Note: This file is only used for Atlantis local development -version: "3.8" services: - ngrok: - image: wernight/ngrok:latest - ports: - - 4040:4040 - environment: - # https://dashboard.ngrok.com/get-started/your-authtoken - # NGROK_AUTH: REPLACE-WITH-YOUR-TOKEN // set this in atlantis.env - NGROK_PROTOCOL: http - NGROK_PORT: atlantis:4141 - env_file: - - ./atlantis.env - depends_on: - - atlantis - redis: - image: redis:7.2-alpine - restart: always - ports: - - '6379:6379' - command: redis-server --save 20 1 --loglevel warning --requirepass test123 - volumes: - - redis:/data - atlantis: - depends_on: - - redis - build: - context: . - dockerfile: Dockerfile.dev - ports: - - 4141:4141 - volumes: - - ~/.ssh:/.ssh - - ./:/atlantis/src - # Contains the flags that atlantis uses in env var form - env_file: - - ./atlantis.env + ngrok: + image: wernight/ngrok:latest@sha256:d211f29ebcfe5f4e72df4fa8bdd9a667886e127d7fcb1be4a1af5ad83a8a1b77 + ports: + - 4040:4040 + environment: + # https://dashboard.ngrok.com/get-started/your-authtoken + # NGROK_AUTH: REPLACE-WITH-YOUR-TOKEN // set this in atlantis.env + NGROK_PROTOCOL: http + NGROK_PORT: atlantis:4141 + env_file: + - atlantis.env + depends_on: + - atlantis + redis: + image: redis:7.4-alpine@sha256:de13e74e14b98eb96bdf886791ae47686c3c5d29f9d5f85ea55206843e3fce26 + restart: always + ports: + - 6379:6379 + command: redis-server --save 20 1 --loglevel warning --requirepass test123 + volumes: + - redis:/data + atlantis: + depends_on: + - redis + build: + context: . + dockerfile: Dockerfile.dev + ports: + - 4141:4141 + volumes: + - ${HOME}/.ssh:/.ssh:ro + - ${PWD}:/atlantis/src:ro + # Contains the flags that atlantis uses in env var form + env_file: + - atlantis.env volumes: - redis: - driver: local + redis: + driver: local diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh index 18a8f6ceaf..4f307e1666 100755 --- a/docker-entrypoint.sh +++ b/docker-entrypoint.sh @@ -1,4 +1,10 @@ -#!/usr/bin/dumb-init /bin/sh +#!/usr/bin/env -S dumb-init --single-child /bin/sh + +# dumb-init is run in single child mode. By default dumb-init will forward +# interrupts to all child processes, causing Terraform to cancel and Terraform +# providers to exit uncleanly. We forward the signal to Atlantis only, allowing +# it to trap the interrupt, and exit gracefully. 
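For context on the `docker-entrypoint.sh` change above: with `--single-child`, dumb-init forwards SIGINT/SIGTERM only to its direct child instead of broadcasting to the whole process group, so a running `terraform` is no longer killed mid-apply. A rough Go sketch, using the standard `os/signal` API rather than Atlantis's actual shutdown code, of what "trap the interrupt and exit gracefully" looks like on the receiving side:

```go
package main

import (
	"context"
	"log"
	"os/signal"
	"syscall"
)

func main() {
	// ctx is cancelled on the first SIGINT or SIGTERM delivered to this
	// process; child processes never receive the signal directly.
	ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM)
	defer stop()

	<-ctx.Done()
	log.Println("interrupt received; draining in-flight runs before exit")
	// ... let running plans/applies finish, then return ...
}
```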
+ set -e # Modified: https://github.com/hashicorp/docker-consul/blob/2c2873f9d619220d1eef0bc46ec78443f55a10b5/0.X/docker-entrypoint.sh diff --git a/e2e/.gitconfig b/e2e/.gitconfig deleted file mode 100644 index 3424a0e076..0000000000 --- a/e2e/.gitconfig +++ /dev/null @@ -1,3 +0,0 @@ -[user] - name = atlantisbot - email = lkysow+atlantis@gmail.com \ No newline at end of file diff --git a/e2e/e2e.go b/e2e/e2e.go index f19b45b024..079c329df2 100644 --- a/e2e/e2e.go +++ b/e2e/e2e.go @@ -14,29 +14,25 @@ package main import ( + "context" "fmt" "log" "os" "os/exec" "time" - - "github.com/google/go-github/v58/github" ) type E2ETester struct { - githubClient *GithubClient - repoURL string - ownerName string - repoName string + vcsClient VCSClient hookID int64 cloneDirRoot string projectType Project } type E2EResult struct { - projectType string - githubPullRequestURL string - testResult string + projectType string + pullRequestURL string + testResult string } var testFileData = ` @@ -45,7 +41,7 @@ resource "null_resource" "hello" { ` // nolint: gosec -func (t *E2ETester) Start() (*E2EResult, error) { +func (t *E2ETester) Start(ctx context.Context) (*E2EResult, error) { cloneDir := fmt.Sprintf("%s/%s-test", t.cloneDirRoot, t.projectType.Name) branchName := fmt.Sprintf("%s-%s", t.projectType.Name, time.Now().Format("20060102150405")) testFileName := fmt.Sprintf("%s.tf", t.projectType.Name) @@ -58,11 +54,9 @@ func (t *E2ETester) Start() (*E2EResult, error) { return e2eResult, fmt.Errorf("failed to create dir %q prior to cloning, attempting to continue: %v", cloneDir, err) } - cloneCmd := exec.Command("git", "clone", t.repoURL, cloneDir) - // git clone the repo - log.Printf("git cloning into %q", cloneDir) - if output, err := cloneCmd.CombinedOutput(); err != nil { - return e2eResult, fmt.Errorf("failed to clone repository: %v: %s", err, string(output)) + err := t.vcsClient.Clone(cloneDir) + if err != nil { + return e2eResult, err } // checkout a new branch for the project @@ -77,7 +71,7 @@ func (t *E2ETester) Start() (*E2EResult, error) { randomData := []byte(testFileData) filePath := fmt.Sprintf("%s/%s/%s", cloneDir, t.projectType.Name, testFileName) log.Printf("creating file to commit %q", filePath) - err := os.WriteFile(filePath, randomData, 0644) + err = os.WriteFile(filePath, randomData, 0644) if err != nil { return e2eResult, fmt.Errorf("couldn't write file %s: %v", filePath, err) } @@ -108,23 +102,24 @@ func (t *E2ETester) Start() (*E2EResult, error) { // create a new pr title := fmt.Sprintf("This is a test pull request for atlantis e2e test for %s project type", t.projectType.Name) - head := fmt.Sprintf("%s:%s", t.ownerName, branchName) - body := "" - base := "main" - newPullRequest := &github.NewPullRequest{Title: &title, Head: &head, Body: &body, Base: &base} + url, pullId, err := t.vcsClient.CreatePullRequest(ctx, title, branchName) - pull, _, err := t.githubClient.client.PullRequests.Create(t.githubClient.ctx, t.ownerName, t.repoName, newPullRequest) if err != nil { - return e2eResult, fmt.Errorf("error while creating new pull request: %v", err) + return e2eResult, err } // set pull request url - e2eResult.githubPullRequestURL = pull.GetHTMLURL() + e2eResult.pullRequestURL = url - log.Printf("created pull request %s", pull.GetHTMLURL()) + log.Printf("created pull request %s", url) // defer closing pull request and delete remote branch - defer cleanUp(t, pull.GetNumber(), branchName) // nolint: errcheck + defer func() { + err := cleanUp(ctx, t, pullId, branchName) + if err != nil { + 
log.Printf("Failed to cleanup: %v", err) + } + }() // wait for atlantis to respond to webhook and autoplan. time.Sleep(2 * time.Second) @@ -133,9 +128,9 @@ func (t *E2ETester) Start() (*E2EResult, error) { // waiting for atlantis run and finish maxLoops := 20 i := 0 - for ; i < maxLoops && checkStatus(state); i++ { + for ; i < maxLoops && t.vcsClient.IsAtlantisInProgress(state); i++ { time.Sleep(2 * time.Second) - state, _ = getAtlantisStatus(t, branchName) + state, _ = t.vcsClient.GetAtlantisStatus(ctx, branchName) if state == "" { log.Println("atlantis run hasn't started") continue @@ -149,52 +144,26 @@ func (t *E2ETester) Start() (*E2EResult, error) { log.Printf("atlantis run finished with status %q", state) e2eResult.testResult = state // check if atlantis run was a success - if state != "success" { + if !t.vcsClient.DidAtlantisSucceed(state) { return e2eResult, fmt.Errorf("atlantis run project type %q failed with %q status", t.projectType.Name, state) } return e2eResult, nil } -func getAtlantisStatus(t *E2ETester, branchName string) (string, error) { - // check repo status - combinedStatus, _, err := t.githubClient.client.Repositories.GetCombinedStatus(t.githubClient.ctx, t.ownerName, t.repoName, branchName, nil) - if err != nil { - return "", err - } - - for _, status := range combinedStatus.Statuses { - if status.GetContext() == "atlantis/plan" { - return status.GetState(), nil - } - } - - return "", nil -} - -func checkStatus(state string) bool { - for _, s := range []string{"success", "error", "failure"} { - if state == s { - return false - } - } - return true -} - -func cleanUp(t *E2ETester, pullRequestNumber int, branchName string) error { +func cleanUp(ctx context.Context, t *E2ETester, pullRequestNumber int, branchName string) error { // clean up - pullClosed, _, err := t.githubClient.client.PullRequests.Edit(t.githubClient.ctx, t.ownerName, t.repoName, pullRequestNumber, &github.PullRequest{State: github.String("closed")}) + err := t.vcsClient.ClosePullRequest(ctx, pullRequestNumber) if err != nil { - return fmt.Errorf("error while closing new pull request: %v", err) + return err } - log.Printf("closed pull request %d", pullClosed.GetNumber()) + log.Printf("closed pull request %d", pullRequestNumber) - deleteBranchName := fmt.Sprintf("%s/%s", "heads", branchName) - _, err = t.githubClient.client.Git.DeleteRef(t.githubClient.ctx, t.ownerName, t.repoName, deleteBranchName) + err = t.vcsClient.DeleteBranch(ctx, branchName) if err != nil { - return fmt.Errorf("error while deleting branch %s: %v", deleteBranchName, err) + return fmt.Errorf("error while deleting branch %s: %v", branchName, err) } - log.Printf("deleted branch %s", deleteBranchName) + log.Printf("deleted branch %s", branchName) return nil } diff --git a/e2e/github.go b/e2e/github.go index 8d9faedde0..dd70421e84 100644 --- a/e2e/github.go +++ b/e2e/github.go @@ -15,12 +15,163 @@ package main import ( "context" + "fmt" + "log" + "os" + "os/exec" + "strings" - "github.com/google/go-github/v58/github" + "github.com/google/go-github/v65/github" ) type GithubClient struct { - client *github.Client - ctx context.Context - username string + client *github.Client + username string + ownerName string + repoName string + token string +} + +func NewGithubClient() *GithubClient { + + githubUsername := os.Getenv("ATLANTIS_GH_USER") + if githubUsername == "" { + log.Fatalf("ATLANTIS_GH_USER cannot be empty") + } + githubToken := os.Getenv("ATLANTIS_GH_TOKEN") + if githubToken == "" { + log.Fatalf("ATLANTIS_GH_TOKEN cannot be 
empty") + } + ownerName := os.Getenv("GITHUB_REPO_OWNER_NAME") + if ownerName == "" { + ownerName = "runatlantis" + } + repoName := os.Getenv("GITHUB_REPO_NAME") + if repoName == "" { + repoName = "atlantis-tests" + } + + // create github client + tp := github.BasicAuthTransport{ + Username: strings.TrimSpace(githubUsername), + Password: strings.TrimSpace(githubToken), + } + ghClient := github.NewClient(tp.Client()) + + return &GithubClient{ + client: ghClient, + username: githubUsername, + ownerName: ownerName, + repoName: repoName, + token: githubToken, + } + +} + +func (g GithubClient) Clone(cloneDir string) error { + + repoURL := fmt.Sprintf("https://%s:%s@github.com/%s/%s.git", g.username, g.token, g.ownerName, g.repoName) + cloneCmd := exec.Command("git", "clone", repoURL, cloneDir) + // git clone the repo + log.Printf("git cloning into %q", cloneDir) + if output, err := cloneCmd.CombinedOutput(); err != nil { + return fmt.Errorf("failed to clone repository: %v: %s", err, string(output)) + } + return nil +} + +func (g GithubClient) CreateAtlantisWebhook(ctx context.Context, hookURL string) (int64, error) { + contentType := "json" + hookConfig := &github.HookConfig{ + ContentType: &contentType, + URL: &hookURL, + } + // create atlantis hook + atlantisHook := &github.Hook{ + Events: []string{"issue_comment", "pull_request", "push"}, + Config: hookConfig, + Active: github.Bool(true), + } + + hook, _, err := g.client.Repositories.CreateHook(ctx, g.ownerName, g.repoName, atlantisHook) + if err != nil { + return 0, err + } + log.Println(hook.GetURL()) + + return hook.GetID(), nil +} + +func (g GithubClient) DeleteAtlantisHook(ctx context.Context, hookID int64) error { + _, err := g.client.Repositories.DeleteHook(ctx, g.ownerName, g.repoName, hookID) + if err != nil { + return err + } + log.Printf("deleted webhook id %d", hookID) + + return nil +} + +func (g GithubClient) CreatePullRequest(ctx context.Context, title, branchName string) (string, int, error) { + head := fmt.Sprintf("%s:%s", g.ownerName, branchName) + body := "" + base := "main" + newPullRequest := &github.NewPullRequest{Title: &title, Head: &head, Body: &body, Base: &base} + + pull, _, err := g.client.PullRequests.Create(ctx, g.ownerName, g.repoName, newPullRequest) + if err != nil { + return "", 0, fmt.Errorf("error while creating new pull request: %v", err) + } + + // set pull request url + return pull.GetHTMLURL(), pull.GetNumber(), nil + +} + +func (g GithubClient) GetAtlantisStatus(ctx context.Context, branchName string) (string, error) { + // check repo status + combinedStatus, _, err := g.client.Repositories.GetCombinedStatus(ctx, g.ownerName, g.repoName, branchName, nil) + if err != nil { + return "", err + } + + for _, status := range combinedStatus.Statuses { + if status.GetContext() == "atlantis/plan" { + return status.GetState(), nil + } + } + + return "", nil +} + +func (g GithubClient) ClosePullRequest(ctx context.Context, pullRequestNumber int) error { + // clean up + _, _, err := g.client.PullRequests.Edit(ctx, g.ownerName, g.repoName, pullRequestNumber, &github.PullRequest{State: github.String("closed")}) + if err != nil { + return fmt.Errorf("error while closing new pull request: %v", err) + } + return nil + +} +func (g GithubClient) DeleteBranch(ctx context.Context, branchName string) error { + + deleteBranchName := fmt.Sprintf("%s/%s", "heads", branchName) + _, err := g.client.Git.DeleteRef(ctx, g.ownerName, g.repoName, deleteBranchName) + if err != nil { + return fmt.Errorf("error while deleting branch 
%s: %v", branchName, err) + } + return nil +} + +func (g GithubClient) IsAtlantisInProgress(state string) bool { + for _, s := range []string{"success", "error", "failure"} { + if state == s { + return false + } + } + return true +} + +func (g GithubClient) DidAtlantisSucceed(state string) bool { + return state == "success" } diff --git a/e2e/gitlab.go b/e2e/gitlab.go new file mode 100644 index 0000000000..2226aa299d --- /dev/null +++ b/e2e/gitlab.go @@ -0,0 +1,181 @@ +// Copyright 2017 HootSuite Media Inc. +// +// Licensed under the Apache License, Version 2.0 (the License); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an AS IS BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// Modified hereafter by contributors to runatlantis/atlantis. + +package main + +import ( + "context" + "fmt" + "log" + "os" + "os/exec" + + "github.com/xanzy/go-gitlab" +) + +type GitlabClient struct { + client *gitlab.Client + username string + ownerName string + repoName string + token string + projectId int + // A mapping from branch names to MR IDs + branchToMR map[string]int +} + +func NewGitlabClient() *GitlabClient { + + gitlabUsername := os.Getenv("ATLANTIS_GITLAB_USER") + if gitlabUsername == "" { + log.Fatalf("ATLANTIS_GITLAB_USER cannot be empty") + } + gitlabToken := os.Getenv("ATLANTIS_GITLAB_TOKEN") + if gitlabToken == "" { + log.Fatalf("ATLANTIS_GITLAB_TOKEN cannot be empty") + } + ownerName := os.Getenv("GITLAB_REPO_OWNER_NAME") + if ownerName == "" { + ownerName = "run-atlantis" + } + repoName := os.Getenv("GITLAB_REPO_NAME") + if repoName == "" { + repoName = "atlantis-tests" + } + + gitlabClient, err := gitlab.NewClient(gitlabToken) + if err != nil { + log.Fatalf("Failed to create client: %v", err) + } + project, _, err := gitlabClient.Projects.GetProject(fmt.Sprintf("%s/%s", ownerName, repoName), &gitlab.GetProjectOptions{}) + if err != nil { + log.Fatalf("Failed to find project: %v", err) + } + + return &GitlabClient{ + client: gitlabClient, + username: gitlabUsername, + ownerName: ownerName, + repoName: repoName, + token: gitlabToken, + projectId: project.ID, + branchToMR: make(map[string]int), + } + +} + +func (g GitlabClient) Clone(cloneDir string) error { + + repoURL := fmt.Sprintf("https://%s:%s@gitlab.com/%s/%s.git", g.username, g.token, g.ownerName, g.repoName) + cloneCmd := exec.Command("git", "clone", repoURL, cloneDir) + // git clone the repo + log.Printf("git cloning into %q", cloneDir) + if output, err := cloneCmd.CombinedOutput(); err != nil { + return fmt.Errorf("failed to clone repository: %v: %s", err, string(output)) + } + return nil + +} + +func (g GitlabClient) CreateAtlantisWebhook(ctx context.Context, hookURL string) (int64, error) { + hook, _, err := g.client.Projects.AddProjectHook(g.projectId, &gitlab.AddProjectHookOptions{ + URL: &hookURL, + IssuesEvents: gitlab.Ptr(true), + MergeRequestsEvents: gitlab.Ptr(true), + PushEvents: gitlab.Ptr(true), + }) + if err != nil { + return 0, err + } + log.Printf("created webhook for %s", hook.URL) + return int64(hook.ID), err +} + +func (g GitlabClient) DeleteAtlantisHook(ctx context.Context, hookID int64) error { + _, err := 
g.client.Projects.DeleteProjectHook(g.projectId, int(hookID)) + if err != nil { + return err + } + log.Printf("deleted webhook id %d", hookID) + return nil +} + +func (g GitlabClient) CreatePullRequest(ctx context.Context, title, branchName string) (string, int, error) { + + mr, _, err := g.client.MergeRequests.CreateMergeRequest(g.projectId, &gitlab.CreateMergeRequestOptions{ + Title: gitlab.Ptr(title), + SourceBranch: gitlab.Ptr(branchName), + TargetBranch: gitlab.Ptr("main"), + }) + if err != nil { + return "", 0, fmt.Errorf("error while creating new pull request: %v", err) + } + g.branchToMR[branchName] = mr.IID + return mr.WebURL, mr.IID, nil + +} + +func (g GitlabClient) GetAtlantisStatus(ctx context.Context, branchName string) (string, error) { + + pipelineInfos, _, err := g.client.MergeRequests.ListMergeRequestPipelines(g.projectId, g.branchToMR[branchName]) + if err != nil { + return "", err + } + // Possible todo: determine which status in the pipeline we care about? + if len(pipelineInfos) != 1 { + return "", fmt.Errorf("unexpected pipelines: %d", len(pipelineInfos)) + } + pipelineInfo := pipelineInfos[0] + pipeline, _, err := g.client.Pipelines.GetPipeline(g.projectId, pipelineInfo.ID) + if err != nil { + return "", err + } + + return pipeline.Status, nil +} + +func (g GitlabClient) ClosePullRequest(ctx context.Context, pullRequestNumber int) error { + // clean up + _, _, err := g.client.MergeRequests.UpdateMergeRequest(g.projectId, pullRequestNumber, &gitlab.UpdateMergeRequestOptions{ + StateEvent: gitlab.Ptr("close"), + }) + if err != nil { + return fmt.Errorf("error while closing new pull request: %v", err) + } + return nil + +} +func (g GitlabClient) DeleteBranch(ctx context.Context, branchName string) error { + _, err := g.client.Branches.DeleteBranch(g.projectId, branchName) + + if err != nil { + return fmt.Errorf("error while deleting branch %s: %v", branchName, err) + } + return nil + +} + +func (g GitlabClient) IsAtlantisInProgress(state string) bool { + // From https://docs.gitlab.com/ee/api/pipelines.html + // created, waiting_for_resource, preparing, pending, running, success, failed, canceled, skipped, manual, scheduled + for _, s := range []string{"success", "failed", "canceled", "skipped"} { + if state == s { + return false + } + } + return true +} + +func (g GitlabClient) DidAtlantisSucceed(state string) bool { + return state == "success" +} diff --git a/e2e/go.mod b/e2e/go.mod index d43c435acd..3b04a1f852 100644 --- a/e2e/go.mod +++ b/e2e/go.mod @@ -1,13 +1,22 @@ module github.com/runatlantis/atlantis/e2e -go 1.22 +go 1.23.2 require ( - github.com/google/go-github/v58 v58.0.0 + github.com/google/go-github/v65 v65.0.0 github.com/hashicorp/go-multierror v1.1.1 + github.com/xanzy/go-gitlab v0.111.0 ) require ( + github.com/golang/protobuf v1.5.3 // indirect github.com/google/go-querystring v1.1.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-cleanhttp v0.5.2 // indirect + github.com/hashicorp/go-retryablehttp v0.7.7 // indirect + golang.org/x/net v0.23.0 // indirect + golang.org/x/oauth2 v0.6.0 // indirect + golang.org/x/time v0.3.0 // indirect + google.golang.org/appengine v1.6.7 // indirect + google.golang.org/protobuf v1.33.0 // indirect ) diff --git a/e2e/go.sum b/e2e/go.sum index 459e075eed..42fc2c0195 100644 --- a/e2e/go.sum +++ b/e2e/go.sum @@ -1,13 +1,60 @@ +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= +github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-github/v58 v58.0.0 h1:Una7GGERlF/37XfkPwpzYJe0Vp4dt2k1kCjlxwjIvzw= -github.com/google/go-github/v58 v58.0.0/go.mod h1:k4hxDKEfoWpSqFlc8LTpGd9fu2KrV1YAa6Hi6FmDNY4= +github.com/google/go-github/v65 v65.0.0 h1:pQ7BmO3DZivvFk92geC0jB0q2m3gyn8vnYPgV7GSLhQ= +github.com/google/go-github/v65 v65.0.0/go.mod h1:DvrqWo5hvsdhJvHd4WyVF9ttANN3BniqjP8uTFMNb60= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= +github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU= +github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/xanzy/go-gitlab v0.111.0 h1:4zT52QdDVxGYAGxN2VY8upSvZIiuiI+Z4d+c+7D/lII= +github.com/xanzy/go-gitlab v0.111.0/go.mod h1:wKNKh3GkYDMOsGmnfuX+ITCmDuSDWFO0G+C4AygL9RY= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod 
h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs= +golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= +golang.org/x/oauth2 v0.6.0 h1:Lh8GPgSKBfWSwFvtuWOfeI3aAAnbXTSutYxJiOJFgIw= +golang.org/x/oauth2 v0.6.0/go.mod h1:ycmewcwgD4Rpr3eZJLSB4Kyyljb3qDh40vJ8STE5HKw= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= +golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/e2e/main.go b/e2e/main.go index a8a7634274..dacee30451 100644 --- a/e2e/main.go +++ b/e2e/main.go @@ -15,13 +15,12 @@ package main import ( "context" + "errors" "log" "os" - "strings" "fmt" - "github.com/google/go-github/v58/github" multierror "github.com/hashicorp/go-multierror" ) @@ -36,32 +35,29 @@ type Project struct { ApplyCommand string } -func main() { +func getVCSClient() (VCSClient, error) { - githubUsername := os.Getenv("GITHUB_USERNAME") - if githubUsername == "" { - log.Fatalf("GITHUB_USERNAME cannot be empty") + if os.Getenv("ATLANTIS_GH_USER") != "" { + log.Print("Running tests for github") + return NewGithubClient(), nil } - githubToken := os.Getenv("GITHUB_PASSWORD") - if githubToken == "" { - log.Fatalf("GITHUB_PASSWORD cannot be empty") + if os.Getenv("ATLANTIS_GITLAB_USER") != "" { + log.Print("Running tests for gitlab") + return NewGitlabClient(), nil } + + return nil, errors.New("could not determine which vcs client") +} + +func main() { + atlantisURL := os.Getenv("ATLANTIS_URL") if atlantisURL == "" { atlantisURL = defaultAtlantisURL } // add /events to the url atlantisURL = fmt.Sprintf("%s/events", atlantisURL) - ownerName := os.Getenv("GITHUB_REPO_OWNER_NAME") - if ownerName == "" { - ownerName = "runatlantis" - } - repoName := os.Getenv("GITHUB_REPO_NAME") - if repoName == "" { - repoName = "atlantis-tests" - } - // using https to clone the repo - repoURL := fmt.Sprintf("https://%s:%s@github.com/%s/%s.git", githubUsername, githubToken, ownerName, repoName) + cloneDirRoot := os.Getenv("CLONE_DIR") if cloneDirRoot == "" { cloneDirRoot = "/tmp/atlantis-tests" @@ -74,38 +70,31 @@ func main() { log.Fatalf("failed to clean dir %q before 
cloning, attempting to continue: %v", cloneDirRoot, err) } - // create github client - tp := github.BasicAuthTransport{ - Username: strings.TrimSpace(githubUsername), - Password: strings.TrimSpace(githubToken), + vcsClient, err := getVCSClient() + if err != nil { + log.Fatalf("failed to get vcs client: %v", err) } - ghClient := github.NewClient(tp.Client()) - - githubClient := &GithubClient{client: ghClient, ctx: context.Background(), username: githubUsername} - + ctx := context.Background() // we create atlantis hook once for the repo, since the atlantis server can handle multiple requests log.Printf("creating atlantis webhook with %s url", atlantisURL) - hookID, err := createAtlantisWebhook(githubClient, ownerName, repoName, atlantisURL) + hookID, err := vcsClient.CreateAtlantisWebhook(ctx, atlantisURL) if err != nil { log.Fatalf("error creating atlantis webhook: %v", err) } // create e2e test e2e := E2ETester{ - githubClient: githubClient, - repoURL: repoURL, - ownerName: ownerName, - repoName: repoName, + vcsClient: vcsClient, hookID: hookID, cloneDirRoot: cloneDirRoot, } // start e2e tests - results, err := startTests(e2e) + results, err := startTests(ctx, e2e) log.Printf("Test Results\n---------------------------\n") for _, result := range results { fmt.Printf("Project Type: %s \n", result.projectType) - fmt.Printf("Pull Request Link: %s \n", result.githubPullRequestURL) + fmt.Printf("Pull Request Link: %s \n", result.pullRequestURL) fmt.Printf("Atlantis Run Status: %s \n", result.testResult) fmt.Println("---------------------------") } @@ -115,52 +104,21 @@ func main() { } -func createAtlantisWebhook(g *GithubClient, ownerName string, repoName string, hookURL string) (int64, error) { - // create atlantis hook - atlantisHook := &github.Hook{ - Events: []string{"issue_comment", "pull_request", "push"}, - Config: map[string]interface{}{ - "url": hookURL, - "content_type": "json", - }, - Active: github.Bool(true), - } - - // moved to github.go - hook, _, err := g.client.Repositories.CreateHook(g.ctx, ownerName, repoName, atlantisHook) - if err != nil { - return 0, err - } - log.Println(hook.GetURL()) - - return hook.GetID(), nil -} - -func deleteAtlantisHook(g *GithubClient, ownerName string, repoName string, hookID int64) error { - _, err := g.client.Repositories.DeleteHook(g.ctx, ownerName, repoName, hookID) - if err != nil { - return err - } - log.Printf("deleted webhook id %d", hookID) - - return nil -} - func cleanDir(path string) error { return os.RemoveAll(path) } -func startTests(e2e E2ETester) ([]*E2EResult, error) { +func startTests(ctx context.Context, e2e E2ETester) ([]*E2EResult, error) { var testResults []*E2EResult var testErrors *multierror.Error // delete webhook when we are done running tests - defer deleteAtlantisHook(e2e.githubClient, e2e.ownerName, e2e.repoName, e2e.hookID) // nolint: errcheck + defer e2e.vcsClient.DeleteAtlantisHook(ctx, e2e.hookID) // nolint: errcheck for _, projectType := range projectTypes { log.Printf("starting e2e test for project type %q", projectType.Name) e2e.projectType = projectType // start e2e test - result, err := e2e.Start() + result, err := e2e.Start(ctx) testResults = append(testResults, result) testErrors = multierror.Append(testErrors, err) } diff --git a/e2e/vcs.go b/e2e/vcs.go new file mode 100644 index 0000000000..2648a913fe --- /dev/null +++ b/e2e/vcs.go @@ -0,0 +1,28 @@ +// Copyright 2017 HootSuite Media Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the License); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an AS IS BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// Modified hereafter by contributors to runatlantis/atlantis. + +package main + +import "context" + +type VCSClient interface { + Clone(cloneDir string) error + CreateAtlantisWebhook(ctx context.Context, hookURL string) (int64, error) + DeleteAtlantisHook(ctx context.Context, hookID int64) error + CreatePullRequest(ctx context.Context, title, branchName string) (string, int, error) + GetAtlantisStatus(ctx context.Context, branchName string) (string, error) + ClosePullRequest(ctx context.Context, pullRequestNumber int) error + DeleteBranch(ctx context.Context, branchName string) error + IsAtlantisInProgress(state string) bool + DidAtlantisSucceed(state string) bool +} diff --git a/go.mod b/go.mod index c0d10cb4fa..e61905540f 100644 --- a/go.mod +++ b/go.mod @@ -1,103 +1,110 @@ module github.com/runatlantis/atlantis -go 1.22 +go 1.23.2 require ( - github.com/Masterminds/sprig/v3 v3.2.3 - github.com/alicebob/miniredis/v2 v2.31.1 - github.com/bradleyfalzon/ghinstallation/v2 v2.9.0 - github.com/briandowns/spinner v1.23.0 + code.gitea.io/sdk/gitea v0.19.0 + github.com/Masterminds/sprig/v3 v3.3.0 + github.com/alicebob/miniredis/v2 v2.33.0 + github.com/bradleyfalzon/ghinstallation/v2 v2.11.0 + github.com/briandowns/spinner v1.23.1 github.com/cactus/go-statsd-client/v5 v5.1.0 github.com/go-ozzo/ozzo-validation v3.6.0+incompatible - github.com/go-playground/validator/v10 v10.18.0 - github.com/go-test/deep v1.1.0 - github.com/golang-jwt/jwt/v5 v5.2.0 - github.com/google/go-github/v58 v58.0.0 + github.com/go-playground/validator/v10 v10.22.1 + github.com/go-test/deep v1.1.1 + github.com/golang-jwt/jwt/v5 v5.2.1 + github.com/google/go-github/v65 v65.0.0 github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 github.com/google/uuid v1.6.0 github.com/gorilla/mux v1.8.1 - github.com/gorilla/websocket v1.5.1 - github.com/hashicorp/go-getter/v2 v2.2.1 + github.com/gorilla/websocket v1.5.3 + github.com/hashicorp/go-getter/v2 v2.2.3 github.com/hashicorp/go-multierror v1.1.1 - github.com/hashicorp/go-version v1.6.0 + github.com/hashicorp/go-version v1.7.0 github.com/hashicorp/golang-lru/v2 v2.0.7 - github.com/hashicorp/terraform-config-inspect v0.0.0-20231204233900-a34142ec2a72 + github.com/hashicorp/hc-install v0.9.0 + github.com/hashicorp/terraform-config-inspect v0.0.0-20240801114854-6714b46f5fe4 + github.com/jpillora/backoff v1.0.0 github.com/kr/pretty v0.3.1 github.com/mcdafydd/go-azuredevops v0.12.1 - github.com/microcosm-cc/bluemonday v1.0.26 + github.com/microcosm-cc/bluemonday v1.0.27 github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db github.com/mitchellh/go-homedir v1.1.0 github.com/moby/patternmatcher v0.6.0 github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 - github.com/petergtz/pegomock/v4 v4.0.0 + github.com/opentofu/tofudl v0.0.0-20240923062014-8c1e00f33ce6 + github.com/petergtz/pegomock/v4 v4.1.0 github.com/pkg/errors v0.9.1 - github.com/redis/go-redis/v9 v9.5.1 + github.com/redis/go-redis/v9 v9.6.2 
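Note on the refactor above: the VCSClient interface in e2e/vcs.go is what getVCSClient in e2e/main.go returns, and the GitHub and GitLab clients built by NewGithubClient and NewGitlabClient are expected to satisfy it. Below is a minimal sketch of a compile-time conformance check plus a no-op stub, a possible starting point when adding another provider; the stub is illustrative and not part of this patch.

// Illustrative only: compile-time check and no-op stub for VCSClient.
package main

import "context"

// Fails to compile if stubClient drifts from the VCSClient interface.
var _ VCSClient = (*stubClient)(nil)

type stubClient struct{}

func (s *stubClient) Clone(cloneDir string) error { return nil }

func (s *stubClient) CreateAtlantisWebhook(ctx context.Context, hookURL string) (int64, error) {
	return 0, nil
}

func (s *stubClient) DeleteAtlantisHook(ctx context.Context, hookID int64) error { return nil }

func (s *stubClient) CreatePullRequest(ctx context.Context, title, branchName string) (string, int, error) {
	return "", 0, nil
}

func (s *stubClient) GetAtlantisStatus(ctx context.Context, branchName string) (string, error) {
	return "", nil
}

func (s *stubClient) ClosePullRequest(ctx context.Context, pullRequestNumber int) error { return nil }

func (s *stubClient) DeleteBranch(ctx context.Context, branchName string) error { return nil }

func (s *stubClient) IsAtlantisInProgress(state string) bool { return false }

func (s *stubClient) DidAtlantisSucceed(state string) bool { return state == "success" }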
github.com/remeh/sizedwaitgroup v1.0.0 - github.com/shurcooL/githubv4 v0.0.0-20240120211514-18a1ae0e79dc - github.com/slack-go/slack v0.12.4 - github.com/spf13/cobra v1.8.0 + github.com/shurcooL/githubv4 v0.0.0-20240727222349-48295856cce7 + github.com/slack-go/slack v0.15.0 + github.com/spf13/cobra v1.8.1 github.com/spf13/pflag v1.0.5 - github.com/spf13/viper v1.18.2 - github.com/stretchr/testify v1.8.4 + github.com/spf13/viper v1.19.0 + github.com/stretchr/testify v1.9.0 github.com/uber-go/tally/v4 v4.1.10 - github.com/urfave/negroni/v3 v3.1.0 - github.com/warrensbox/terraform-switcher v0.1.1-0.20230206012955-d7dfd1b44605 - github.com/xanzy/go-gitlab v0.97.0 - go.etcd.io/bbolt v1.3.8 + github.com/urfave/negroni/v3 v3.1.1 + github.com/xanzy/go-gitlab v0.111.0 + go.etcd.io/bbolt v1.3.11 go.uber.org/zap v1.27.0 - golang.org/x/term v0.17.0 - golang.org/x/text v0.14.0 + golang.org/x/term v0.25.0 + golang.org/x/text v0.19.0 gopkg.in/yaml.v3 v3.0.1 ) require ( github.com/agext/levenshtein v1.2.3 github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect - github.com/go-playground/locales v0.14.1 // indirect - github.com/go-playground/universal-translator v0.18.1 // indirect - github.com/hashicorp/hcl/v2 v2.19.1 - github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/leodido/go-urn v1.4.0 // indirect + github.com/hashicorp/hcl/v2 v2.22.0 github.com/shurcooL/graphql v0.0.0-20220606043923-3cf50f8a0a29 // indirect go.uber.org/atomic v1.11.0 // indirect ) require github.com/twmb/murmur3 v1.1.8 // indirect -require github.com/google/go-github/v57 v57.0.0 // indirect - require ( + dario.cat/mergo v1.0.1 // indirect github.com/Masterminds/goutils v1.1.1 // indirect - github.com/Masterminds/semver/v3 v3.2.1 // indirect + github.com/Masterminds/semver/v3 v3.3.0 // indirect + github.com/ProtonMail/go-crypto v1.1.0-alpha.2 // indirect + github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f // indirect + github.com/ProtonMail/gopenpgp/v2 v2.7.5 // indirect github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect - github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/aymerick/douceur v0.2.0 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/cloudflare/circl v1.3.9 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/davidmz/go-pageant v1.0.2 // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect - github.com/fatih/color v1.15.0 // indirect + github.com/fatih/color v1.16.0 // indirect github.com/fsnotify/fsnotify v1.7.0 // indirect github.com/gabriel-vasile/mimetype v1.4.3 // indirect - github.com/golang-jwt/jwt/v4 v4.5.0 // indirect + github.com/go-fed/httpsig v1.1.0 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/golang-jwt/jwt/v4 v4.5.1 // indirect github.com/golang/protobuf v1.5.3 // indirect github.com/google/go-cmp v0.6.0 // indirect + github.com/google/go-github/v62 v62.0.0 // indirect github.com/google/go-querystring v1.1.0 // indirect - github.com/gorilla/css v1.0.0 // indirect + github.com/gorilla/css v1.0.1 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect - 
github.com/hashicorp/go-retryablehttp v0.7.4 // indirect + github.com/hashicorp/go-retryablehttp v0.7.7 // indirect github.com/hashicorp/go-safetemp v1.0.0 // indirect github.com/hashicorp/hcl v1.0.0 // indirect - github.com/huandu/xstrings v1.4.0 // indirect - github.com/imdario/mergo v0.3.16 // indirect - github.com/klauspost/compress v1.17.0 // indirect + github.com/huandu/xstrings v1.5.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/klauspost/compress v1.17.2 // indirect github.com/kr/text v0.2.0 // indirect + github.com/leodido/go-urn v1.4.0 // indirect github.com/magiconair/properties v1.8.7 // indirect github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.19 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/go-testing-interface v1.14.1 // indirect @@ -105,7 +112,7 @@ require ( github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/onsi/gomega v1.27.6 // indirect - github.com/pelletier/go-toml/v2 v2.1.0 // indirect + github.com/pelletier/go-toml/v2 v2.2.2 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus/client_golang v1.12.1 // indirect github.com/prometheus/client_model v0.2.0 // indirect @@ -114,22 +121,25 @@ require ( github.com/rogpeppe/go-internal v1.9.0 // indirect github.com/sagikazarmark/locafero v0.4.0 // indirect github.com/sagikazarmark/slog-shim v0.1.0 // indirect - github.com/shopspring/decimal v1.3.1 // indirect + github.com/shopspring/decimal v1.4.0 // indirect github.com/sourcegraph/conc v0.3.0 // indirect github.com/spf13/afero v1.11.0 // indirect - github.com/spf13/cast v1.6.0 // indirect + github.com/spf13/cast v1.7.0 // indirect github.com/subosito/gotenv v1.6.0 // indirect github.com/ulikunitz/xz v0.5.11 // indirect - github.com/yuin/gopher-lua v1.1.0 // indirect - github.com/zclconf/go-cty v1.13.2 // indirect + github.com/yuin/gopher-lua v1.1.1 // indirect + github.com/zclconf/go-cty v1.14.4 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/crypto v0.19.0 // indirect - golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect - golang.org/x/net v0.21.0 // indirect - golang.org/x/oauth2 v0.15.0 // indirect - golang.org/x/sys v0.17.0 // indirect + golang.org/x/crypto v0.26.0 // indirect + golang.org/x/exp v0.0.0-20231006140011-7918f672742d // indirect + golang.org/x/mod v0.21.0 // indirect + golang.org/x/net v0.26.0 // indirect + golang.org/x/oauth2 v0.18.0 // indirect + golang.org/x/sync v0.8.0 // indirect + golang.org/x/sys v0.26.0 // indirect golang.org/x/time v0.5.0 // indirect - google.golang.org/appengine v1.6.7 // indirect - google.golang.org/protobuf v1.31.0 // indirect + golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d // indirect + google.golang.org/appengine v1.6.8 // indirect + google.golang.org/protobuf v1.33.0 // indirect gopkg.in/ini.v1 v1.67.0 // indirect ) diff --git a/go.sum b/go.sum index 1c2844f2fe..8613b6dde2 100644 --- a/go.sum +++ b/go.sum @@ -30,17 +30,28 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod 
h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +code.gitea.io/sdk/gitea v0.19.0 h1:8I6s1s4RHgzxiPHhOQdgim1RWIRcr0LVMbHBjBFXq4Y= +code.gitea.io/sdk/gitea v0.19.0/go.mod h1:IG9xZJoltDNeDSW0qiF2Vqx5orMWa7OhVWrjvrd5NpI= +dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s= +dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/DmitriyVTitov/size v1.5.0/go.mod h1:le6rNI4CoLQV1b9gzp1+3d7hMAD/uu2QcJ+aYbNgiU0= github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= -github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= -github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0= -github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= -github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA= -github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM= +github.com/Masterminds/semver/v3 v3.3.0 h1:B8LGeaivUe71a5qox1ICM/JLl0NqZSW5CHyL+hmvYS0= +github.com/Masterminds/semver/v3 v3.3.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= +github.com/Masterminds/sprig/v3 v3.3.0 h1:mQh0Yrg1XPo6vjYXgtf5OtijNAKJRNcTdOOGZe3tPhs= +github.com/Masterminds/sprig/v3 v3.3.0/go.mod h1:Zy1iXRYNqNLUolqCpL4uhk6SHUMAOSCzdgBfDb35Lz0= +github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= +github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= +github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= +github.com/ProtonMail/go-crypto v1.1.0-alpha.2 h1:bkyFVUP+ROOARdgCiJzNQo2V2kiB97LyUpzH9P6Hrlg= +github.com/ProtonMail/go-crypto v1.1.0-alpha.2/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= +github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f h1:tCbYj7/299ekTTXpdwKYF8eBlsYsDVoggDAuAjoK66k= +github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f/go.mod h1:gcr0kNtGBqin9zDW9GOHcVntrwnjrK+qdJ06mWYBybw= +github.com/ProtonMail/gopenpgp/v2 v2.7.5 h1:STOY3vgES59gNgoOt2w0nyHBjKViB/qSg7NjbQWPJkA= +github.com/ProtonMail/gopenpgp/v2 v2.7.5/go.mod h1:IhkNEDaxec6NyzSI0PlxapinnwPVIESk8/76da3Ct3g= github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= @@ -50,10 +61,8 @@ github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRF github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a h1:HbKu58rmZpUGpz5+4FfNmIU+FmZg2P3Xaj2v2bfNWmk= github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc= -github.com/alicebob/miniredis/v2 v2.31.1 
h1:7XAt0uUg3DtwEKW5ZAGa+K7FZV2DdKQo5K/6TTnfX8Y= -github.com/alicebob/miniredis/v2 v2.31.1/go.mod h1:UB/T2Uztp7MlFSDakaX1sTXUv5CASoprx0wulRT6HBg= -github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= -github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= +github.com/alicebob/miniredis/v2 v2.33.0 h1:uvTF0EDeu9RLnUEG27Db5I68ESoIxTiXbNUiji6lZrA= +github.com/alicebob/miniredis/v2 v2.33.0/go.mod h1:MhP4a3EU7aENRi9aO+tHfTBZicLqQevyi/DJpoj6mi0= github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= @@ -66,14 +75,15 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d h1:xDfNPAt8lFiC1UJrqV3uuy861HCTo708pDMbjHHdCas= github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ00z/TKoufEY6K/a0k6AhaJrQKdFe6OfVXsa4= -github.com/bradleyfalzon/ghinstallation/v2 v2.9.0 h1:HmxIYqnxubRYcYGRc5v3wUekmo5Wv2uX3gukmWJ0AFk= -github.com/bradleyfalzon/ghinstallation/v2 v2.9.0/go.mod h1:wmkTDJf8CmVypxE8ijIStFnKoTa6solK5QfdmJrP9KI= -github.com/briandowns/spinner v1.23.0 h1:alDF2guRWqa/FOZZYWjlMIx2L6H0wyewPxo/CH4Pt2A= -github.com/briandowns/spinner v1.23.0/go.mod h1:rPG4gmXeN3wQV/TsAY4w8lPdIM6RX3yqeBQJSrbXjuE= +github.com/bradleyfalzon/ghinstallation/v2 v2.11.0 h1:R9d0v+iobRHSaE4wKUnXFiZp53AL4ED5MzgEMwGTZag= +github.com/bradleyfalzon/ghinstallation/v2 v2.11.0/go.mod h1:0LWKQwOHewXO/1acI6TtyE0Xc4ObDb2rFN7eHBAG71M= +github.com/briandowns/spinner v1.23.1 h1:t5fDPmScwUjozhDj4FA46p5acZWIPXYE30qW2Ptu650= +github.com/briandowns/spinner v1.23.1/go.mod h1:LaZeM4wm2Ywy6vO571mvhQNRcWfRUnXOs0RcKV0wYKM= github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= +github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= github.com/cactus/go-statsd-client/v5 v5.0.0/go.mod h1:COEvJ1E+/E2L4q6QE5CkjWPi4eeDw9maJBMIuMPBZbY= github.com/cactus/go-statsd-client/v5 v5.1.0 h1:sbbdfIl9PgisjEoXzvXI1lwUKWElngsjJKaZeC021P4= github.com/cactus/go-statsd-client/v5 v5.1.0/go.mod h1:COEvJ1E+/E2L4q6QE5CkjWPi4eeDw9maJBMIuMPBZbY= @@ -86,27 +96,44 @@ github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWR github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= +github.com/cloudflare/circl v1.3.9 h1:QFrlgFYf2Qpi8bSpVPK1HBvWpx16v/1TZivyo7pGuBE= +github.com/cloudflare/circl v1.3.9/go.mod h1:PDRU+oXvdD7KCtgKxW95M5Z8BpSCJXQORiZFnBQS5QU= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod 
h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg= +github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davidmz/go-pageant v1.0.2 h1:bPblRCh5jGU+Uptpz6LgMZGD5hJoOt7otgT454WvHn0= +github.com/davidmz/go-pageant v1.0.2/go.mod h1:P2EDDnMqIwG5Rrp05dTRITj9z2zpGcD9efWSkTNKLIE= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= +github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= -github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= +github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= +github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= +github.com/go-fed/httpsig v1.1.0 h1:9M+hb0jkEICD8/cAiNqEB66R87tTINszBRTjwjQzWcI= +github.com/go-fed/httpsig v1.1.0/go.mod h1:RCMrTZvN1bJYtofsG4rd5NaO5obxQ5xBkdiS7xsT7bM= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= +github.com/go-git/go-billy/v5 v5.5.0 h1:yEY4yhzCDuMGSv83oGxiBotRzhwhNr8VZyphhiu+mTU= +github.com/go-git/go-billy/v5 v5.5.0/go.mod h1:hmexnoNsr2SJU1Ju67OaNz5ASJY3+sHgFRpCtpDCKow= +github.com/go-git/go-git/v5 v5.12.0 h1:7Md+ndsjrzZxbddRDZjF14qK+NN56sy6wkqaVrjZtys= +github.com/go-git/go-git/v5 v5.12.0/go.mod 
h1:FTM9VKtnI2m65hNI/TenDDDnUf2Q9FHnXYjuz9i5OEY= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -118,8 +145,8 @@ github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9 github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= -github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= -github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= +github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-ozzo/ozzo-validation v3.6.0+incompatible h1:msy24VGS42fKO9K1vLz82/GeYW1cILu7Nuuj1N3BBkE= github.com/go-ozzo/ozzo-validation v3.6.0+incompatible/go.mod h1:gsEKFIVnabGBt6mXmxK0MoFy+cZoTJY6mu5Ll3LVLBU= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= @@ -128,23 +155,25 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= -github.com/go-playground/validator/v10 v10.18.0 h1:BvolUXjp4zuvkZ5YN5t7ebzbhlUtPsPm2S9NAZ5nl9U= -github.com/go-playground/validator/v10 v10.18.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= +github.com/go-playground/validator/v10 v10.22.1 h1:40JcKH+bBNGFczGuoBYgX4I6m/i27HYW8P9FDk5PbgA= +github.com/go-playground/validator/v10 v10.22.1/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls= github.com/go-test/deep v1.0.4/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/go-test/deep v1.1.0 h1:WOcxcdHcvdgThNXjw0t76K42FXTU7HpNQWHpA2HHNlg= -github.com/go-test/deep v1.1.0/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= +github.com/go-test/deep v1.1.1 h1:0r/53hagsehfO4bzD2Pgr/+RgHqhmf+k1Bpse2cTu1U= +github.com/go-test/deep v1.1.1/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= -github.com/golang-jwt/jwt/v5 v5.2.0 h1:d/ix8ftRUorsN+5eMIlF4T6J8CAt9rch3My2winC1Jw= -github.com/golang-jwt/jwt/v5 v5.2.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v4 v4.5.1 h1:JdqV9zKUdtaa9gdPlywC3aeoEsR681PlKC+4F5gQgeo= +github.com/golang-jwt/jwt/v4 v4.5.1/go.mod 
h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= +github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= @@ -186,10 +215,10 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-github/v57 v57.0.0 h1:L+Y3UPTY8ALM8x+TV0lg+IEBI+upibemtBD8Q9u7zHs= -github.com/google/go-github/v57 v57.0.0/go.mod h1:s0omdnye0hvK/ecLvpsGfJMiRt85PimQh4oygmLIxHw= -github.com/google/go-github/v58 v58.0.0 h1:Una7GGERlF/37XfkPwpzYJe0Vp4dt2k1kCjlxwjIvzw= -github.com/google/go-github/v58 v58.0.0/go.mod h1:k4hxDKEfoWpSqFlc8LTpGd9fu2KrV1YAa6Hi6FmDNY4= +github.com/google/go-github/v62 v62.0.0 h1:/6mGCaRywZz9MuHyw9gD1CwsbmBX8GWsbFkwMmHdhl4= +github.com/google/go-github/v62 v62.0.0/go.mod h1:EMxeUqGJq2xRu9DYBMwel/mr7kZrzUOfQmmpYrZn2a4= +github.com/google/go-github/v65 v65.0.0 h1:pQ7BmO3DZivvFk92geC0jB0q2m3gyn8vnYPgV7GSLhQ= +github.com/google/go-github/v65 v65.0.0/go.mod h1:DvrqWo5hvsdhJvHd4WyVF9ttANN3BniqjP8uTFMNb60= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= @@ -208,56 +237,55 @@ github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLe github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= -github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= -github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= +github.com/gorilla/css v1.0.1 
h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8= +github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0= github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY= -github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY= +github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= +github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= -github.com/hashicorp/go-getter/v2 v2.2.1 h1:2JXqPZs1Jej67RtdTi0YZaEB2hEFB3fkBA4cPYKQwFQ= -github.com/hashicorp/go-getter/v2 v2.2.1/go.mod h1:EcJx6oZE8hmGuRR1l38QrfnyiujQbwsEAn11eHv6l2M= -github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= -github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c= -github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-getter/v2 v2.2.3 h1:6CVzhT0KJQHqd9b0pK3xSP0CM/Cv+bVhk+jcaRJ2pGk= +github.com/hashicorp/go-getter/v2 v2.2.3/go.mod h1:hp5Yy0GMQvwWVUmwLs3ygivz1JSLI323hdIE9J9m7TY= +github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= +github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/go-retryablehttp v0.7.4 h1:ZQgVdpTdAL7WpMIwLzCfbalOcSUdkDZnpUv3/+BxzFA= -github.com/hashicorp/go-retryablehttp v0.7.4/go.mod h1:Jy/gPYAdjqffZ/yFGCFV2doI5wjtH1ewM9u8iYVjtX8= +github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU= +github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk= github.com/hashicorp/go-safetemp v1.0.0 h1:2HR189eFNrjHQyENnQMMpCiBAsRxzbTMIgBhEyExpmo= github.com/hashicorp/go-safetemp v1.0.0/go.mod h1:oaerMy3BhqiTbVye6QuFhFtIceqFoDHxNAB65b+Rj1I= -github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= -github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= +github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= +github.com/hashicorp/hc-install 
v0.9.0 h1:2dIk8LcvANwtv3QZLckxcjyF5w8KVtiMxu6G6eLhghE= +github.com/hashicorp/hc-install v0.9.0/go.mod h1:+6vOP+mf3tuGgMApVYtmsnDoKWMDcFXeTxCACYZ8SFg= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/hcl/v2 v2.19.1 h1://i05Jqznmb2EXqa39Nsvyan2o5XyMowW5fnCKW5RPI= -github.com/hashicorp/hcl/v2 v2.19.1/go.mod h1:ThLC89FV4p9MPW804KVbe/cEXoQ8NZEh+JtMeeGErHE= -github.com/hashicorp/terraform-config-inspect v0.0.0-20231204233900-a34142ec2a72 h1:nZ5gGjbe5o7XUu1d7j+Y5Ztcxlp+yaumTKH9i0D3wlg= -github.com/hashicorp/terraform-config-inspect v0.0.0-20231204233900-a34142ec2a72/go.mod h1:l8HcFPm9cQh6Q0KSWoYPiePqMvRFenybP1CH2MjKdlg= -github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU= -github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/hashicorp/hcl/v2 v2.22.0 h1:hkZ3nCtqeJsDhPRFz5EA9iwcG1hNWGePOTw6oyul12M= +github.com/hashicorp/hcl/v2 v2.22.0/go.mod h1:62ZYHrXgPoX8xBnzl8QzbWq4dyDsDtfCRgIq1rbJEvA= +github.com/hashicorp/terraform-config-inspect v0.0.0-20240801114854-6714b46f5fe4 h1:RwY5HBgtBZ997UtKJAO2Rx+94ETyevwWEVXWx1SL5YY= +github.com/hashicorp/terraform-config-inspect v0.0.0-20240801114854-6714b46f5fe4/go.mod h1:Gz/z9Hbn+4KSp8A2FBtNszfLSdT2Tn/uAKGuVqqWmDI= +github.com/huandu/xstrings v1.5.0 h1:2ag3IFq9ZDANvthTwTiqSSZLjDc+BedvHPAp5tJy2TI= +github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= -github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= @@ -267,9 +295,11 @@ github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1 github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= +github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/kisielk/gotool v1.0.0/go.mod 
h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.17.0 h1:Rnbp4K9EjcDuVuHtd0dgA4qNuv9yKDYKK1ulpJwgrqM= -github.com/klauspost/compress v1.17.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4= +github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= @@ -280,8 +310,6 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= -github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= @@ -289,18 +317,17 @@ github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3v github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= -github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= github.com/mcdafydd/go-azuredevops v0.12.1 h1:WxwLVyGuJ8oL7uWQp1/J6GefX1wMQQZUHWRGsrm+uE8= github.com/mcdafydd/go-azuredevops v0.12.1/go.mod h1:B4UDyn7WEj1/97f45j3VnzEfkWKe05+/dCcAPdOET4A= -github.com/microcosm-cc/bluemonday v1.0.26 h1:xbqSvqzQMeEHCqMi64VAs4d8uy6Mequs3rQ0k/Khz58= -github.com/microcosm-cc/bluemonday v1.0.26/go.mod h1:JyzOCs9gkyQyjs+6h10UEVSe02CGwkhd72Xdqh78TWs= +github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk= +github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA= github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ= github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw= -github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/copystructure v1.2.0 
h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= @@ -311,7 +338,6 @@ github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQ github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= @@ -329,10 +355,14 @@ github.com/onsi/ginkgo/v2 v2.9.2 h1:BA2GMJOtfGAfagzYtrAlufIP0lq6QERkFmHLMLPwFSU= github.com/onsi/ginkgo/v2 v2.9.2/go.mod h1:WHcJJG2dIlcCqVfBAwUCrJxSPFb6v4azBwgxeMeDuts= github.com/onsi/gomega v1.27.6 h1:ENqfyGeS5AX/rlXDd/ETokDz93u0YufY1Pgxuy/PvWE= github.com/onsi/gomega v1.27.6/go.mod h1:PIQNjfQwkP3aQAH7lf7j87O/5FiNr+ZR8+ipb+qQlhg= -github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4= -github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= -github.com/petergtz/pegomock/v4 v4.0.0 h1:BIGMUof4NXc+xBbuFk0VBfK5Ls7DplcP+LWz4hfYWsY= -github.com/petergtz/pegomock/v4 v4.0.0/go.mod h1:Xscaw/kXYcuh9sGsns+If19FnSMMQy4Wz60YJTn3XOU= +github.com/opentofu/tofudl v0.0.0-20240923062014-8c1e00f33ce6 h1:+1yJm0gEoDaxYmMhmmU3gRAOMx3A43z84bokm1dQroU= +github.com/opentofu/tofudl v0.0.0-20240923062014-8c1e00f33ce6/go.mod h1:CD1BhvxxNPp4ZBwNBjWycf5isG9UaPrzfE7J/E/s6RY= +github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM= +github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= +github.com/petergtz/pegomock/v4 v4.1.0 h1:Reoy2rlwshuxNaD2ZWp5TrSCrmoFH5SSLHb5U1z2pog= +github.com/petergtz/pegomock/v4 v4.1.0/go.mod h1:Xscaw/kXYcuh9sGsns+If19FnSMMQy4Wz60YJTn3XOU= +github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= +github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -365,8 +395,8 @@ github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1 github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/prometheus/procfs v0.10.1 h1:kYK1Va/YMlutzCGazswoHKo//tZVlFpKYh+PymziUAg= github.com/prometheus/procfs v0.10.1/go.mod h1:nwNm2aOCAYw8uTR/9bWRREkZFxAUcWzPHWJq+XBB/FM= -github.com/redis/go-redis/v9 v9.5.1 h1:H1X4D3yHPaYrkL5X06Wh6xNVM/pX0Ft4RV0vMGvLBh8= -github.com/redis/go-redis/v9 v9.5.1/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M= +github.com/redis/go-redis/v9 v9.6.2 h1:w0uvkRbc9KpgD98zcvo5IrVUsn0lXpRMuhNgiHDJzdk= +github.com/redis/go-redis/v9 v9.6.2/go.mod h1:0C0c6ycQsdpVNQpxb1njEQIqkx5UcsM8FJCQLgE9+RA= github.com/remeh/sizedwaitgroup v1.0.0 h1:VNGGFwNo/R5+MJBf6yrsr110p0m4/OX4S3DCy7Kyl5E= 
github.com/remeh/sizedwaitgroup v1.0.0/go.mod h1:3j2R4OIe/SeS6YDhICBy22RWjJC5eNCJ1V+9+NVNYlo= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= @@ -379,45 +409,47 @@ github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6g github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= github.com/samber/lo v1.38.1 h1:j2XEAqXKb09Am4ebOg31SpvzUTTs6EN3VfgeLUhPdXM= github.com/samber/lo v1.38.1/go.mod h1:+m/ZKRl6ClXCE2Lgf3MsQlWfh4bn1bz6CXEOxnEXnEA= -github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= -github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= -github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= -github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/shurcooL/githubv4 v0.0.0-20240120211514-18a1ae0e79dc h1:vH0NQbIDk+mJLvBliNGfcQgUmhlniWBDXC79oRxfZA0= -github.com/shurcooL/githubv4 v0.0.0-20240120211514-18a1ae0e79dc/go.mod h1:zqMwyHmnN/eDOZOdiTohqIUKUrTFX62PNlu7IJdu0q8= +github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= +github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= +github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= +github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= +github.com/shurcooL/githubv4 v0.0.0-20240727222349-48295856cce7 h1:cYCy18SHPKRkvclm+pWm1Lk4YrREb4IOIb/YdFO0p2M= +github.com/shurcooL/githubv4 v0.0.0-20240727222349-48295856cce7/go.mod h1:zqMwyHmnN/eDOZOdiTohqIUKUrTFX62PNlu7IJdu0q8= github.com/shurcooL/graphql v0.0.0-20220606043923-3cf50f8a0a29 h1:B1PEwpArrNp4dkQrfxh/abbBAOZBVp0ds+fBEOUOqOc= github.com/shurcooL/graphql v0.0.0-20220606043923-3cf50f8a0a29/go.mod h1:AuYgA5Kyo4c7HfUmvRGs/6rGlMMV/6B1bVnB9JxJEEg= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= -github.com/slack-go/slack v0.12.4 h1:4iLT2opw+/QptmQxBNA7S8pNfSIvtn0NDGu7Jq0emi4= -github.com/slack-go/slack v0.12.4/go.mod h1:hlGi5oXA+Gt+yWTPP0plCdRKmjsDxecdHxYQdlMQKOw= +github.com/skeema/knownhosts v1.2.2 h1:Iug2P4fLmDw9f41PB6thxUkNUkJzB5i+1/exaj40L3A= +github.com/skeema/knownhosts v1.2.2/go.mod h1:xYbVRSPxqBZFrdmDyMmsOs+uX1UZC3nTN3ThzgDxUwo= +github.com/slack-go/slack v0.15.0 h1:LE2lj2y9vqqiOf+qIIy0GvEoxgF1N5yLGZffmEZykt0= +github.com/slack-go/slack v0.15.0/go.mod h1:hlGi5oXA+Gt+yWTPP0plCdRKmjsDxecdHxYQdlMQKOw= github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= -github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= -github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= -github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= 
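Note on the dependency swap earlier in this patch: go.mod drops github.com/warrensbox/terraform-switcher and adds github.com/hashicorp/hc-install plus github.com/opentofu/tofudl, which moves Terraform/OpenTofu binary downloads onto HashiCorp's own installer library. A minimal sketch of the hc-install API being pulled in follows; the pinned version is illustrative, not Atlantis's actual wiring.

// Illustrative only: download a specific Terraform release via hc-install.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/hashicorp/go-version"
	install "github.com/hashicorp/hc-install"
	"github.com/hashicorp/hc-install/product"
	"github.com/hashicorp/hc-install/releases"
	"github.com/hashicorp/hc-install/src"
)

func main() {
	// Ensure returns a path to the requested binary, downloading it
	// from releases.hashicorp.com only if it is not already installed.
	execPath, err := install.NewInstaller().Ensure(context.Background(), []src.Source{
		&releases.ExactVersion{
			Product: product.Terraform,
			Version: version.Must(version.NewVersion("1.9.5")), // illustrative pin
		},
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("terraform binary at", execPath)
}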
-github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= +github.com/spf13/cast v1.7.0 h1:ntdiHjuueXFgm5nzDRdOS4yfT43P5Fnud6DH50rz/7w= +github.com/spf13/cast v1.7.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= +github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= +github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.18.2 h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ= -github.com/spf13/viper v1.18.2/go.mod h1:EKmWIqdnk5lOcmR72yw6hS+8OPYcwD0jteitLMVB+yk= +github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI= +github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/twmb/murmur3 v1.1.5/go.mod h1:Qq/R7NUyOfr65zD+6Q5IHKsJLwP7exErjN6lyyq3OSQ= @@ -427,22 +459,24 @@ github.com/uber-go/tally/v4 v4.1.10 h1:2GSX7Tmq26wjAvOtQEc5EvRROIkX2OX4vpROt6mlR github.com/uber-go/tally/v4 v4.1.10/go.mod h1:pPR56rjthjtLB8xQlEx2I1VwAwRGCh/i4xMUcmG+6z4= github.com/ulikunitz/xz v0.5.11 h1:kpFauv27b6ynzBNT/Xy+1k+fK4WswhN/6PN5WhFAGw8= github.com/ulikunitz/xz v0.5.11/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= -github.com/urfave/negroni/v3 v3.1.0 h1:lzmuxGSpnJCT/ujgIAjkU3+LW3NX8alCglO/L6KjIGQ= -github.com/urfave/negroni/v3 v3.1.0/go.mod h1:jWvnX03kcSjDBl/ShB0iHvx5uOs7mAzZXW+JvJ5XYAs= -github.com/warrensbox/terraform-switcher v0.1.1-0.20230206012955-d7dfd1b44605 h1:bRt3KvPapqnO3s9XenyU4COpU9X7cNW3BMELyHRxuSs= -github.com/warrensbox/terraform-switcher v0.1.1-0.20230206012955-d7dfd1b44605/go.mod h1:saryXNaL624mlulV138FP+HhVw7IpvETUXLS3nTvH1g= -github.com/xanzy/go-gitlab v0.97.0 h1:StMqJ1Kvt00X43pYIBBjj52dFlghwSeBhRDRfzaZ7xY= -github.com/xanzy/go-gitlab v0.97.0/go.mod 
h1:ETg8tcj4OhrB84UEgeE8dSuV/0h4BBL1uOV/qK0vlyI= +github.com/urfave/negroni/v3 v3.1.1 h1:6MS4nG9Jk/UuCACaUlNXCbiKa0ywF9LXz5dGu09v8hw= +github.com/urfave/negroni/v3 v3.1.1/go.mod h1:jWvnX03kcSjDBl/ShB0iHvx5uOs7mAzZXW+JvJ5XYAs= +github.com/xanzy/go-gitlab v0.111.0 h1:4zT52QdDVxGYAGxN2VY8upSvZIiuiI+Z4d+c+7D/lII= +github.com/xanzy/go-gitlab v0.111.0/go.mod h1:wKNKh3GkYDMOsGmnfuX+ITCmDuSDWFO0G+C4AygL9RY= +github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= +github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -github.com/yuin/gopher-lua v1.1.0 h1:BojcDhfyDWgU2f2TOzYK/g5p2gxMrku8oupLDqlnSqE= -github.com/yuin/gopher-lua v1.1.0/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw= -github.com/zclconf/go-cty v1.13.2 h1:4GvrUxe/QUDYuJKAav4EYqdM47/kZa672LwmXFmEKT0= -github.com/zclconf/go-cty v1.13.2/go.mod h1:YKQzy/7pZ7iq2jNFzy5go57xdxdWoLLpaEp4u238AE0= -go.etcd.io/bbolt v1.3.8 h1:xs88BrvEv273UsB79e0hcVrlUWmS0a8upikMFhSyAtA= -go.etcd.io/bbolt v1.3.8/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= +github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M= +github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw= +github.com/zclconf/go-cty v1.14.4 h1:uXXczd9QDGsgu0i/QFR/hzI5NYCHLf6NQw/atrbnhq8= +github.com/zclconf/go-cty v1.14.4/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= +github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo= +github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM= +go.etcd.io/bbolt v1.3.11 h1:yGEzV1wPz2yVCLsD8ZAiGHhHVlczyC9d1rP43/VCRJ0= +go.etcd.io/bbolt v1.3.11/go.mod h1:dksAq7YMXoljX0xu6VF5DMZGbhYYoLUalEiSySYAS4I= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -465,10 +499,12 @@ golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200403201458-baeed622b8d8/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= -golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= -golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= +golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= +golang.org/x/crypto v0.26.0 
h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw= +golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -479,8 +515,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g= -golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k= +golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI= +golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -502,6 +538,9 @@ golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= +golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -536,8 +575,10 @@ golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= -golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= -golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= +golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ= +golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 
v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190402181905-9f3314589c9a/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -546,8 +587,8 @@ golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4Iltr golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= -golang.org/x/oauth2 v0.15.0 h1:s8pnnxNVzjWyrvYdFUQq5llS1PX2zhPXmccZv99h7uQ= -golang.org/x/oauth2 v0.15.0/go.mod h1:q48ptWNTY5XWf+JNten23lcvHpLJ0ZSxF5ttTHKVCAM= +golang.org/x/oauth2 v0.18.0 h1:09qnuIAgzdx1XplqJvW6CQqMCtGZykZWcXzPMPUusvI= +golang.org/x/oauth2 v0.18.0/go.mod h1:Wf7knwG0MPoWIMMBgFlEaSUDaKskp0dCfrlJRJXbBi8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -558,10 +599,12 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= +golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -601,14 +644,18 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= -golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= 
+golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= -golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U= -golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= +golang.org/x/term v0.25.0 h1:WtHI/ltw4NvSUig5KARz9h521QvRC8RmF/cuYqifU24= +golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -616,9 +663,12 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= +golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -665,8 +715,9 @@ golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ= -golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= 
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -693,8 +744,8 @@ google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7
 google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
 google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
 google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
-google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=
-google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
+google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
+google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
 google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
 google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
 google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
@@ -748,8 +799,8 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj
 google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
 google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
-google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8=
-google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
+google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
+google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
 gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -759,6 +810,8 @@ gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
 gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
 gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
 gopkg.in/validator.v2 v2.0.0-20200605151824-2b28d334fa05/go.mod h1:o4V0GXN9/CAmCsvJ0oXYZvrZOe7syiDZSN1GWGZTGzc=
+gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
+gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
 gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
diff --git a/goss.yaml b/goss.yaml
new file mode 100644
index 0000000000..efb4796f42
--- /dev/null
+++ b/goss.yaml
@@ -0,0 +1,37 @@
+# See: https://github.com/goss-org/goss/blob/master/docs/gossfile.md
+
+command:
+  # ensure atlantis is available
+  atlantis-available:
+    exec: "atlantis version"
+    exit-status: 0
+    stdout: []
+    stderr: []
+
+  # ensure conftest is available
+  conftest-available:
+    exec: "conftest -v"
+    exit-status: 0
+    stdout: []
+    stderr: []
+
+  # ensure git-lfs is available
+  git-lfs-available:
+    exec: "git-lfs -v"
+    exit-status: 0
+    stdout: []
+    stderr: []
+
+  # ensure terraform is available
+  terraform-available:
+    exec: "terraform version"
+    exit-status: 0
+    stdout: []
+    stderr: []
+
+  # ensure tofu binary is available
+  tofu-available:
+    exec: "tofu version"
+    exit-status: 0
+    stdout: []
+    stderr: []
diff --git a/netlify.toml b/netlify.toml
new file mode 100644
index 0000000000..f32bbf2065
--- /dev/null
+++ b/netlify.toml
@@ -0,0 +1,32 @@
+# Netlify Config, https://www.netlify.com/docs/netlify-toml-reference/
+[build]
+base = "/"
+command = "npm install && npm run website:build"
+publish = "runatlantis.io/.vitepress/dist/"
+
+[[redirects]]
+force = true
+from = "/guide/getting-started.html"
+status = 301
+to = "/guide/"
+
+[[redirects]]
+force = true
+from = "/docs/atlantis-yaml-reference.html"
+status = 301
+to = "/docs/repo-level-atlantis-yaml.html"
+
+[[headers]]
+for = "/*"
+[headers.values]
+Cache-Control = "public, max-age=86400"
+Referrer-Policy = "no-referrer"
+Strict-Transport-Security = "max-age=86400; includeSubDomains; preload"
+X-Content-Type-Options = "nosniff"
+X-Frame-Options = "DENY"
+X-XSS-Protection = "1; mode=block"
+
+[[headers]]
+for = "*.html"
+[headers.values]
+Content-Type = "text/html; charset=UTF-8"
diff --git a/package-lock.json b/package-lock.json
new file mode 100644
index 0000000000..9807b006af
--- /dev/null
+++ b/package-lock.json
@@ -0,0 +1,4119 @@
+{
+  "name": "atlantis",
+  "lockfileVersion": 3,
+  "requires": true,
+  "packages": {
+    "": {
+      "license": "Apache-2.0",
+      "devDependencies": {
+        "@playwright/test": "^1.44.0",
+        "@types/node": "^20.12.12",
+        "@vueuse/core": "^10.9.0",
+        "markdown-it-footnote": "^4.0.0",
+        "markdownlint-cli": "^0.40.0",
+        "mermaid": "^10.9.1",
+        "sitemap-ts": "^1.7.3",
+        "vitepress": "^1.2.3",
+        "vitepress-plugin-mermaid": "^2.0.16",
+        "vue": "^3.4.27"
+      }
+    },
+    "node_modules/@algolia/autocomplete-core": {
+      "version": "1.9.3",
+      "resolved": "https://registry.npmjs.org/@algolia/autocomplete-core/-/autocomplete-core-1.9.3.tgz",
+      "integrity": "sha512-009HdfugtGCdC4JdXUbVJClA0q0zh24yyePn+KUGk3rP7j8FEe/m5Yo/z65gn6nP/cM39PxpzqKrL7A6fP6PPw==",
+      "dev": true,
+      "dependencies": {
+        "@algolia/autocomplete-plugin-algolia-insights": "1.9.3",
+        "@algolia/autocomplete-shared": "1.9.3"
+      }
+    },
+    "node_modules/@algolia/autocomplete-plugin-algolia-insights": {
+      "version": "1.9.3",
+      "resolved": "https://registry.npmjs.org/@algolia/autocomplete-plugin-algolia-insights/-/autocomplete-plugin-algolia-insights-1.9.3.tgz",
+      "integrity": "sha512-a/yTUkcO/Vyy+JffmAnTWbr4/90cLzw+CC3bRbhnULr/EM0fGNvM13oQQ14f2moLMcVDyAx/leczLlAOovhSZg==",
+      "dev": true,
+      "dependencies": {
+        "@algolia/autocomplete-shared": "1.9.3"
+      },
+      "peerDependencies": {
+        "search-insights": ">= 1 < 3"
+      }
+    },
+    "node_modules/@algolia/autocomplete-preset-algolia": {
+      "version": "1.9.3",
+      "resolved": "https://registry.npmjs.org/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.9.3.tgz",
+      "integrity": "sha512-d4qlt6YmrLMYy95n5TB52wtNDr6EgAIPH81dvvvW8UmuWRgxEtY0NJiPwl/h95JtG2vmRM804M0DSwMCNZlzRA==",
+      "dev": true,
+      "dependencies": {
+        "@algolia/autocomplete-shared": "1.9.3"
+      },
+      "peerDependencies": {
+        "@algolia/client-search": ">= 4.9.1 < 6",
+        "algoliasearch": ">= 4.9.1 < 6"
+      }
+    },
+    "node_modules/@algolia/autocomplete-shared": {
+      "version": "1.9.3",
+      "resolved": "https://registry.npmjs.org/@algolia/autocomplete-shared/-/autocomplete-shared-1.9.3.tgz",
+      "integrity": "sha512-Wnm9E4Ye6Rl6sTTqjoymD+l8DjSTHsHboVRYrKgEt8Q7UHm9nYbqhN/i0fhUYA3OAEH7WA8x3jfpnmJm3rKvaQ==",
+      "dev": true,
"peerDependencies": { + "@algolia/client-search": ">= 4.9.1 < 6", + "algoliasearch": ">= 4.9.1 < 6" + } + }, + "node_modules/@algolia/cache-browser-local-storage": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.23.3.tgz", + "integrity": "sha512-vRHXYCpPlTDE7i6UOy2xE03zHF2C8MEFjPN2v7fRbqVpcOvAUQK81x3Kc21xyb5aSIpYCjWCZbYZuz8Glyzyyg==", + "dev": true, + "dependencies": { + "@algolia/cache-common": "4.23.3" + } + }, + "node_modules/@algolia/cache-common": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.23.3.tgz", + "integrity": "sha512-h9XcNI6lxYStaw32pHpB1TMm0RuxphF+Ik4o7tcQiodEdpKK+wKufY6QXtba7t3k8eseirEMVB83uFFF3Nu54A==", + "dev": true + }, + "node_modules/@algolia/cache-in-memory": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.23.3.tgz", + "integrity": "sha512-yvpbuUXg/+0rbcagxNT7un0eo3czx2Uf0y4eiR4z4SD7SiptwYTpbuS0IHxcLHG3lq22ukx1T6Kjtk/rT+mqNg==", + "dev": true, + "dependencies": { + "@algolia/cache-common": "4.23.3" + } + }, + "node_modules/@algolia/client-account": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.23.3.tgz", + "integrity": "sha512-hpa6S5d7iQmretHHF40QGq6hz0anWEHGlULcTIT9tbUssWUriN9AUXIFQ8Ei4w9azD0hc1rUok9/DeQQobhQMA==", + "dev": true, + "dependencies": { + "@algolia/client-common": "4.23.3", + "@algolia/client-search": "4.23.3", + "@algolia/transporter": "4.23.3" + } + }, + "node_modules/@algolia/client-analytics": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.23.3.tgz", + "integrity": "sha512-LBsEARGS9cj8VkTAVEZphjxTjMVCci+zIIiRhpFun9jGDUlS1XmhCW7CTrnaWeIuCQS/2iPyRqSy1nXPjcBLRA==", + "dev": true, + "dependencies": { + "@algolia/client-common": "4.23.3", + "@algolia/client-search": "4.23.3", + "@algolia/requester-common": "4.23.3", + "@algolia/transporter": "4.23.3" + } + }, + "node_modules/@algolia/client-common": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.23.3.tgz", + "integrity": "sha512-l6EiPxdAlg8CYhroqS5ybfIczsGUIAC47slLPOMDeKSVXYG1n0qGiz4RjAHLw2aD0xzh2EXZ7aRguPfz7UKDKw==", + "dev": true, + "dependencies": { + "@algolia/requester-common": "4.23.3", + "@algolia/transporter": "4.23.3" + } + }, + "node_modules/@algolia/client-personalization": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.23.3.tgz", + "integrity": "sha512-3E3yF3Ocr1tB/xOZiuC3doHQBQ2zu2MPTYZ0d4lpfWads2WTKG7ZzmGnsHmm63RflvDeLK/UVx7j2b3QuwKQ2g==", + "dev": true, + "dependencies": { + "@algolia/client-common": "4.23.3", + "@algolia/requester-common": "4.23.3", + "@algolia/transporter": "4.23.3" + } + }, + "node_modules/@algolia/client-search": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.23.3.tgz", + "integrity": "sha512-P4VAKFHqU0wx9O+q29Q8YVuaowaZ5EM77rxfmGnkHUJggh28useXQdopokgwMeYw2XUht49WX5RcTQ40rZIabw==", + "dev": true, + "dependencies": { + "@algolia/client-common": "4.23.3", + "@algolia/requester-common": "4.23.3", + "@algolia/transporter": "4.23.3" + } + }, + "node_modules/@algolia/logger-common": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.23.3.tgz", + "integrity": 
"sha512-y9kBtmJwiZ9ZZ+1Ek66P0M68mHQzKRxkW5kAAXYN/rdzgDN0d2COsViEFufxJ0pb45K4FRcfC7+33YB4BLrZ+g==", + "dev": true + }, + "node_modules/@algolia/logger-console": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.23.3.tgz", + "integrity": "sha512-8xoiseoWDKuCVnWP8jHthgaeobDLolh00KJAdMe9XPrWPuf1by732jSpgy2BlsLTaT9m32pHI8CRfrOqQzHv3A==", + "dev": true, + "dependencies": { + "@algolia/logger-common": "4.23.3" + } + }, + "node_modules/@algolia/recommend": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-4.23.3.tgz", + "integrity": "sha512-9fK4nXZF0bFkdcLBRDexsnGzVmu4TSYZqxdpgBW2tEyfuSSY54D4qSRkLmNkrrz4YFvdh2GM1gA8vSsnZPR73w==", + "dev": true, + "dependencies": { + "@algolia/cache-browser-local-storage": "4.23.3", + "@algolia/cache-common": "4.23.3", + "@algolia/cache-in-memory": "4.23.3", + "@algolia/client-common": "4.23.3", + "@algolia/client-search": "4.23.3", + "@algolia/logger-common": "4.23.3", + "@algolia/logger-console": "4.23.3", + "@algolia/requester-browser-xhr": "4.23.3", + "@algolia/requester-common": "4.23.3", + "@algolia/requester-node-http": "4.23.3", + "@algolia/transporter": "4.23.3" + } + }, + "node_modules/@algolia/requester-browser-xhr": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.23.3.tgz", + "integrity": "sha512-jDWGIQ96BhXbmONAQsasIpTYWslyjkiGu0Quydjlowe+ciqySpiDUrJHERIRfELE5+wFc7hc1Q5hqjGoV7yghw==", + "dev": true, + "dependencies": { + "@algolia/requester-common": "4.23.3" + } + }, + "node_modules/@algolia/requester-common": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.23.3.tgz", + "integrity": "sha512-xloIdr/bedtYEGcXCiF2muajyvRhwop4cMZo+K2qzNht0CMzlRkm8YsDdj5IaBhshqfgmBb3rTg4sL4/PpvLYw==", + "dev": true + }, + "node_modules/@algolia/requester-node-http": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.23.3.tgz", + "integrity": "sha512-zgu++8Uj03IWDEJM3fuNl34s746JnZOWn1Uz5taV1dFyJhVM/kTNw9Ik7YJWiUNHJQXcaD8IXD1eCb0nq/aByA==", + "dev": true, + "dependencies": { + "@algolia/requester-common": "4.23.3" + } + }, + "node_modules/@algolia/transporter": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.23.3.tgz", + "integrity": "sha512-Wjl5gttqnf/gQKJA+dafnD0Y6Yw97yvfY8R9h0dQltX1GXTgNs1zWgvtWW0tHl1EgMdhAyw189uWiZMnL3QebQ==", + "dev": true, + "dependencies": { + "@algolia/cache-common": "4.23.3", + "@algolia/logger-common": "4.23.3", + "@algolia/requester-common": "4.23.3" + } + }, + "node_modules/@antfu/utils": { + "version": "0.7.8", + "resolved": "https://registry.npmjs.org/@antfu/utils/-/utils-0.7.8.tgz", + "integrity": "sha512-rWQkqXRESdjXtc+7NRfK9lASQjpXJu1ayp7qi1d23zZorY+wBHVLHHoVcMsEnkqEBWTFqbztO7/QdJFzyEcLTg==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/@babel/parser": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.7.tgz", + "integrity": "sha512-9uUYRm6OqQrCqQdG1iCBwBPZgN8ciDBro2nIOFaiRz1/BCxaI7CNvQbDHvsArAC7Tw9Hda/B3U+6ui9u4HWXPw==", + "dev": true, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@braintree/sanitize-url": { + "version": "6.0.4", + "resolved": 
"https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-6.0.4.tgz", + "integrity": "sha512-s3jaWicZd0pkP0jf5ysyHUI/RE7MHos6qlToFcGWXVp+ykHOy77OUMrfbgJ9it2C5bow7OIQwYYaHjk9XlBQ2A==", + "dev": true + }, + "node_modules/@docsearch/css": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.6.0.tgz", + "integrity": "sha512-+sbxb71sWre+PwDK7X2T8+bhS6clcVMLwBPznX45Qu6opJcgRjAp7gYSDzVFp187J+feSj5dNBN1mJoi6ckkUQ==", + "dev": true + }, + "node_modules/@docsearch/js": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@docsearch/js/-/js-3.6.0.tgz", + "integrity": "sha512-QujhqINEElrkIfKwyyyTfbsfMAYCkylInLYMRqHy7PHc8xTBQCow73tlo/Kc7oIwBrCLf0P3YhjlOeV4v8hevQ==", + "dev": true, + "dependencies": { + "@docsearch/react": "3.6.0", + "preact": "^10.0.0" + } + }, + "node_modules/@docsearch/react": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.6.0.tgz", + "integrity": "sha512-HUFut4ztcVNmqy9gp/wxNbC7pTOHhgVVkHVGCACTuLhUKUhKAF9KYHJtMiLUJxEqiFLQiuri1fWF8zqwM/cu1w==", + "dev": true, + "dependencies": { + "@algolia/autocomplete-core": "1.9.3", + "@algolia/autocomplete-preset-algolia": "1.9.3", + "@docsearch/css": "3.6.0", + "algoliasearch": "^4.19.1" + }, + "peerDependencies": { + "@types/react": ">= 16.8.0 < 19.0.0", + "react": ">= 16.8.0 < 19.0.0", + "react-dom": ">= 16.8.0 < 19.0.0", + "search-insights": ">= 1 < 3" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "react": { + "optional": true + }, + "react-dom": { + "optional": true + }, + "search-insights": { + "optional": true + } + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": 
[ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": 
true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + 
"win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.4.15", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", + "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", + "dev": true + }, + "node_modules/@mermaid-js/mermaid-mindmap": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/@mermaid-js/mermaid-mindmap/-/mermaid-mindmap-9.3.0.tgz", + "integrity": "sha512-IhtYSVBBRYviH1Ehu8gk69pMDF8DSRqXBRDMWrEfHoaMruHeaP2DXA3PBnuwsMaCdPQhlUUcy/7DBLAEIXvCAw==", + "dev": true, + "optional": true, + "dependencies": { + "@braintree/sanitize-url": "^6.0.0", + "cytoscape": "^3.23.0", + "cytoscape-cose-bilkent": "^4.1.0", + "cytoscape-fcose": "^2.1.0", + "d3": "^7.0.0", + "khroma": "^2.0.0", + "non-layered-tidy-tree-layout": "^2.0.2" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@playwright/test": { + "version": "1.44.1", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.44.1.tgz", + "integrity": "sha512-1hZ4TNvD5z9VuhNJ/walIjvMVvYkZKf71axoF/uiAqpntQJXpG64dlXhoDXE3OczPuTuvjf/M5KWFg5VAVUS3Q==", + "dev": true, + "dependencies": { + "playwright": "1.44.1" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.22.4.tgz", + "integrity": 
"sha512-Fxamp4aEZnfPOcGA8KSNEohV8hX7zVHOemC8jVBoBUHu5zpJK/Eu3uJwt6BMgy9fkvzxDaurgj96F/NiLukF2w==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.22.4.tgz", + "integrity": "sha512-VXoK5UMrgECLYaMuGuVTOx5kcuap1Jm8g/M83RnCHBKOqvPPmROFJGQaZhGccnsFtfXQ3XYa4/jMCJvZnbJBdA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.22.4.tgz", + "integrity": "sha512-xMM9ORBqu81jyMKCDP+SZDhnX2QEVQzTcC6G18KlTQEzWK8r/oNZtKuZaCcHhnsa6fEeOBionoyl5JsAbE/36Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.22.4.tgz", + "integrity": "sha512-aJJyYKQwbHuhTUrjWjxEvGnNNBCnmpHDvrb8JFDbeSH3m2XdHcxDd3jthAzvmoI8w/kSjd2y0udT+4okADsZIw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.22.4.tgz", + "integrity": "sha512-j63YtCIRAzbO+gC2L9dWXRh5BFetsv0j0va0Wi9epXDgU/XUi5dJKo4USTttVyK7fGw2nPWK0PbAvyliz50SCQ==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.22.4.tgz", + "integrity": "sha512-dJnWUgwWBX1YBRsuKKMOlXCzh2Wu1mlHzv20TpqEsfdZLb3WoJW2kIEsGwLkroYf24IrPAvOT/ZQ2OYMV6vlrg==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.22.4.tgz", + "integrity": "sha512-AdPRoNi3NKVLolCN/Sp4F4N1d98c4SBnHMKoLuiG6RXgoZ4sllseuGioszumnPGmPM2O7qaAX/IJdeDU8f26Aw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.22.4.tgz", + "integrity": "sha512-Gl0AxBtDg8uoAn5CCqQDMqAx22Wx22pjDOjBdmG0VIWX3qUBHzYmOKh8KXHL4UpogfJ14G4wk16EQogF+v8hmA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.22.4.tgz", + "integrity": "sha512-3aVCK9xfWW1oGQpTsYJJPF6bfpWfhbRnhdlyhak2ZiyFLDaayz0EP5j9V1RVLAAxlmWKTDfS9wyRyY3hvhPoOg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.22.4.tgz", + "integrity": 
"sha512-ePYIir6VYnhgv2C5Xe9u+ico4t8sZWXschR6fMgoPUK31yQu7hTEJb7bCqivHECwIClJfKgE7zYsh1qTP3WHUA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.22.4.tgz", + "integrity": "sha512-GqFJ9wLlbB9daxhVlrTe61vJtEY99/xB3C8e4ULVsVfflcpmR6c8UZXjtkMA6FhNONhj2eA5Tk9uAVw5orEs4Q==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.22.4.tgz", + "integrity": "sha512-87v0ol2sH9GE3cLQLNEy0K/R0pz1nvg76o8M5nhMR0+Q+BBGLnb35P0fVz4CQxHYXaAOhE8HhlkaZfsdUOlHwg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.22.4.tgz", + "integrity": "sha512-UV6FZMUgePDZrFjrNGIWzDo/vABebuXBhJEqrHxrGiU6HikPy0Z3LfdtciIttEUQfuDdCn8fqh7wiFJjCNwO+g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.22.4.tgz", + "integrity": "sha512-BjI+NVVEGAXjGWYHz/vv0pBqfGoUH0IGZ0cICTn7kB9PyjrATSkX+8WkguNjWoj2qSr1im/+tTGRaY+4/PdcQw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.22.4.tgz", + "integrity": "sha512-SiWG/1TuUdPvYmzmYnmd3IEifzR61Tragkbx9D3+R8mzQqDBz8v+BvZNDlkiTtI9T15KYZhP0ehn3Dld4n9J5g==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.22.4.tgz", + "integrity": "sha512-j8pPKp53/lq9lMXN57S8cFz0MynJk8OWNuUnXct/9KCpKU7DgU3bYMJhwWmcqC0UU29p8Lr0/7KEVcaM6bf47Q==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@shikijs/core": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-1.6.3.tgz", + "integrity": "sha512-QnJKHFUW95GnlJLJGP6QLx4M69HM0KlXk+R2Y8lr/x4nAx1Yb/lsuxq4XwybuUjTxbJk+BT0g/kvn0bcsjGGHg==", + "dev": true + }, + "node_modules/@shikijs/transformers": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/@shikijs/transformers/-/transformers-1.6.3.tgz", + "integrity": "sha512-ptBuP/IIeqCzK3zZO/knFICZWs58uZWzbv7ND+bKOewe5NcCjZfSiMyzFwOyl23ewPJ1APjRBwLi6Asrodmmxw==", + "dev": true, + "dependencies": { + "shiki": "1.6.3" + } + }, + "node_modules/@types/d3-scale": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.8.tgz", + "integrity": "sha512-gkK1VVTr5iNiYJ7vWDI+yUFFlszhNMtVeneJ6lUTKPjprsvLLI9/tgEGiXJOnlINJA8FyA88gfnQsHbybVZrYQ==", + "dev": true, + "dependencies": { + "@types/d3-time": "*" + } + }, + "node_modules/@types/d3-scale-chromatic": { + "version": "3.0.3", + "resolved": 
"https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.0.3.tgz", + "integrity": "sha512-laXM4+1o5ImZv3RpFAsTRn3TEkzqkytiOY0Dz0sq5cnd1dtNlk6sHLon4OvqaiJb28T0S/TdsBI3Sjsy+keJrw==", + "dev": true + }, + "node_modules/@types/d3-time": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.3.tgz", + "integrity": "sha512-2p6olUZ4w3s+07q3Tm2dbiMZy5pCDfYwtLXXHUnVzXgQlZ/OyPtUz6OL382BkOuGlLXqfT+wqv8Fw2v8/0geBw==", + "dev": true + }, + "node_modules/@types/debug": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", + "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "dev": true, + "dependencies": { + "@types/ms": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", + "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", + "dev": true + }, + "node_modules/@types/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==", + "dev": true + }, + "node_modules/@types/markdown-it": { + "version": "14.1.1", + "resolved": "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-14.1.1.tgz", + "integrity": "sha512-4NpsnpYl2Gt1ljyBGrKMxFYAYvpqbnnkgP/i/g+NLpjEUa3obn1XJCur9YbEXKDAkaXqsR1LbDnGEJ0MmKFxfg==", + "dev": true, + "dependencies": { + "@types/linkify-it": "^5", + "@types/mdurl": "^2" + } + }, + "node_modules/@types/mdast": { + "version": "3.0.15", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.15.tgz", + "integrity": "sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==", + "dev": true, + "dependencies": { + "@types/unist": "^2" + } + }, + "node_modules/@types/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@types/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==", + "dev": true + }, + "node_modules/@types/ms": { + "version": "0.7.34", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.34.tgz", + "integrity": "sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==", + "dev": true + }, + "node_modules/@types/node": { + "version": "20.14.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.2.tgz", + "integrity": "sha512-xyu6WAMVwv6AKFLB+e/7ySZVr/0zLCzOa7rSpq6jNwpqOrUbcACDWC+53d4n2QHOnDou0fbIsg8wZu/sxrnI4Q==", + "dev": true, + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/sax": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/sax/-/sax-1.2.7.tgz", + "integrity": "sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/unist": { + "version": "2.0.10", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.10.tgz", + "integrity": "sha512-IfYcSBWE3hLpBg8+X2SEa8LVkJdJEkT2Ese2aaLs3ptGdVtABxndrMaxuFlQ1qdFf9Q5rDvDpxI3WwgvKFAsQA==", + "dev": true + }, + "node_modules/@types/web-bluetooth": { + "version": "0.0.20", + "resolved": 
"https://registry.npmjs.org/@types/web-bluetooth/-/web-bluetooth-0.0.20.tgz", + "integrity": "sha512-g9gZnnXVq7gM7v3tJCWV/qw7w+KeOlSHAhgF9RytFyifW6AF61hdT2ucrYhPq9hLs5JIryeupHV3qGk95dH9ow==", + "dev": true + }, + "node_modules/@vitejs/plugin-vue": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-5.0.5.tgz", + "integrity": "sha512-LOjm7XeIimLBZyzinBQ6OSm3UBCNVCpLkxGC0oWmm2YPzVZoxMsdvNVimLTBzpAnR9hl/yn1SHGuRfe6/Td9rQ==", + "dev": true, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "peerDependencies": { + "vite": "^5.0.0", + "vue": "^3.2.25" + } + }, + "node_modules/@vue/compiler-core": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.4.27.tgz", + "integrity": "sha512-E+RyqY24KnyDXsCuQrI+mlcdW3ALND6U7Gqa/+bVwbcpcR3BRRIckFoz7Qyd4TTlnugtwuI7YgjbvsLmxb+yvg==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.24.4", + "@vue/shared": "3.4.27", + "entities": "^4.5.0", + "estree-walker": "^2.0.2", + "source-map-js": "^1.2.0" + } + }, + "node_modules/@vue/compiler-dom": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.4.27.tgz", + "integrity": "sha512-kUTvochG/oVgE1w5ViSr3KUBh9X7CWirebA3bezTbB5ZKBQZwR2Mwj9uoSKRMFcz4gSMzzLXBPD6KpCLb9nvWw==", + "dev": true, + "dependencies": { + "@vue/compiler-core": "3.4.27", + "@vue/shared": "3.4.27" + } + }, + "node_modules/@vue/compiler-sfc": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.4.27.tgz", + "integrity": "sha512-nDwntUEADssW8e0rrmE0+OrONwmRlegDA1pD6QhVeXxjIytV03yDqTey9SBDiALsvAd5U4ZrEKbMyVXhX6mCGA==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.24.4", + "@vue/compiler-core": "3.4.27", + "@vue/compiler-dom": "3.4.27", + "@vue/compiler-ssr": "3.4.27", + "@vue/shared": "3.4.27", + "estree-walker": "^2.0.2", + "magic-string": "^0.30.10", + "postcss": "^8.4.38", + "source-map-js": "^1.2.0" + } + }, + "node_modules/@vue/compiler-ssr": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.4.27.tgz", + "integrity": "sha512-CVRzSJIltzMG5FcidsW0jKNQnNRYC8bT21VegyMMtHmhW3UOI7knmUehzswXLrExDLE6lQCZdrhD4ogI7c+vuw==", + "dev": true, + "dependencies": { + "@vue/compiler-dom": "3.4.27", + "@vue/shared": "3.4.27" + } + }, + "node_modules/@vue/devtools-api": { + "version": "7.2.1", + "resolved": "https://registry.npmjs.org/@vue/devtools-api/-/devtools-api-7.2.1.tgz", + "integrity": "sha512-6oNCtyFOrNdqm6GUkFujsCgFlpbsHLnZqq7edeM/+cxAbMyCWvsaCsIMUaz7AiluKLccCGEM8fhOsjaKgBvb7g==", + "dev": true, + "dependencies": { + "@vue/devtools-kit": "^7.2.1" + } + }, + "node_modules/@vue/devtools-kit": { + "version": "7.2.1", + "resolved": "https://registry.npmjs.org/@vue/devtools-kit/-/devtools-kit-7.2.1.tgz", + "integrity": "sha512-Wak/fin1X0Q8LLIfCAHBrdaaB+R6IdpSXsDByPHbQ3BmkCP0/cIo/oEGp9i0U2+gEqD4L3V9RDjNf1S34DTzQQ==", + "dev": true, + "dependencies": { + "@vue/devtools-shared": "^7.2.1", + "hookable": "^5.5.3", + "mitt": "^3.0.1", + "perfect-debounce": "^1.0.0", + "speakingurl": "^14.0.1" + }, + "peerDependencies": { + "vue": "^3.0.0" + } + }, + "node_modules/@vue/devtools-shared": { + "version": "7.2.1", + "resolved": "https://registry.npmjs.org/@vue/devtools-shared/-/devtools-shared-7.2.1.tgz", + "integrity": "sha512-PCJF4UknJmOal68+X9XHyVeQ+idv0LFujkTOIW30+GaMJqwFVN9LkQKX4gLqn61KkGMdJTzQ1bt7EJag3TI6AA==", + "dev": true, + "dependencies": { + "rfdc": "^1.3.1" + } + }, + 
"node_modules/@vue/reactivity": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.4.27.tgz", + "integrity": "sha512-kK0g4NknW6JX2yySLpsm2jlunZJl2/RJGZ0H9ddHdfBVHcNzxmQ0sS0b09ipmBoQpY8JM2KmUw+a6sO8Zo+zIA==", + "dev": true, + "dependencies": { + "@vue/shared": "3.4.27" + } + }, + "node_modules/@vue/runtime-core": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.4.27.tgz", + "integrity": "sha512-7aYA9GEbOOdviqVvcuweTLe5Za4qBZkUY7SvET6vE8kyypxVgaT1ixHLg4urtOlrApdgcdgHoTZCUuTGap/5WA==", + "dev": true, + "dependencies": { + "@vue/reactivity": "3.4.27", + "@vue/shared": "3.4.27" + } + }, + "node_modules/@vue/runtime-dom": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.4.27.tgz", + "integrity": "sha512-ScOmP70/3NPM+TW9hvVAz6VWWtZJqkbdf7w6ySsws+EsqtHvkhxaWLecrTorFxsawelM5Ys9FnDEMt6BPBDS0Q==", + "dev": true, + "dependencies": { + "@vue/runtime-core": "3.4.27", + "@vue/shared": "3.4.27", + "csstype": "^3.1.3" + } + }, + "node_modules/@vue/server-renderer": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.4.27.tgz", + "integrity": "sha512-dlAMEuvmeA3rJsOMJ2J1kXU7o7pOxgsNHVr9K8hB3ImIkSuBrIdy0vF66h8gf8Tuinf1TK3mPAz2+2sqyf3KzA==", + "dev": true, + "dependencies": { + "@vue/compiler-ssr": "3.4.27", + "@vue/shared": "3.4.27" + }, + "peerDependencies": { + "vue": "3.4.27" + } + }, + "node_modules/@vue/shared": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.4.27.tgz", + "integrity": "sha512-DL3NmY2OFlqmYYrzp39yi3LDkKxa5vZVwxWdQ3rG0ekuWscHraeIbnI8t+aZK7qhYqEqWKTUdijadunb9pnrgA==", + "dev": true + }, + "node_modules/@vueuse/core": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/@vueuse/core/-/core-10.10.0.tgz", + "integrity": "sha512-vexJ/YXYs2S42B783rI95lMt3GzEwkxzC8Hb0Ndpd8rD+p+Lk/Za4bd797Ym7yq4jXqdSyj3JLChunF/vyYjUw==", + "dev": true, + "dependencies": { + "@types/web-bluetooth": "^0.0.20", + "@vueuse/metadata": "10.10.0", + "@vueuse/shared": "10.10.0", + "vue-demi": ">=0.14.7" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/@vueuse/core/node_modules/vue-demi": { + "version": "0.14.8", + "resolved": "https://registry.npmjs.org/vue-demi/-/vue-demi-0.14.8.tgz", + "integrity": "sha512-Uuqnk9YE9SsWeReYqK2alDI5YzciATE0r2SkA6iMAtuXvNTMNACJLJEXNXaEy94ECuBe4Sk6RzRU80kjdbIo1Q==", + "dev": true, + "hasInstallScript": true, + "bin": { + "vue-demi-fix": "bin/vue-demi-fix.js", + "vue-demi-switch": "bin/vue-demi-switch.js" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "@vue/composition-api": "^1.0.0-rc.1", + "vue": "^3.0.0-0 || ^2.6.0" + }, + "peerDependenciesMeta": { + "@vue/composition-api": { + "optional": true + } + } + }, + "node_modules/@vueuse/integrations": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/@vueuse/integrations/-/integrations-10.10.0.tgz", + "integrity": "sha512-vHGeK7X6mkdkpcm1eE9t3Cpm21pNVfZRwrjwwbrEs9XftnSgszF4831G2rei8Dt9cIYJIfFV+iyx/29muimJPQ==", + "dev": true, + "dependencies": { + "@vueuse/core": "10.10.0", + "@vueuse/shared": "10.10.0", + "vue-demi": ">=0.14.7" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "async-validator": "*", + "axios": "*", + "change-case": "*", + "drauu": "*", + "focus-trap": "*", + "fuse.js": "*", + 
"idb-keyval": "*", + "jwt-decode": "*", + "nprogress": "*", + "qrcode": "*", + "sortablejs": "*", + "universal-cookie": "*" + }, + "peerDependenciesMeta": { + "async-validator": { + "optional": true + }, + "axios": { + "optional": true + }, + "change-case": { + "optional": true + }, + "drauu": { + "optional": true + }, + "focus-trap": { + "optional": true + }, + "fuse.js": { + "optional": true + }, + "idb-keyval": { + "optional": true + }, + "jwt-decode": { + "optional": true + }, + "nprogress": { + "optional": true + }, + "qrcode": { + "optional": true + }, + "sortablejs": { + "optional": true + }, + "universal-cookie": { + "optional": true + } + } + }, + "node_modules/@vueuse/integrations/node_modules/vue-demi": { + "version": "0.14.8", + "resolved": "https://registry.npmjs.org/vue-demi/-/vue-demi-0.14.8.tgz", + "integrity": "sha512-Uuqnk9YE9SsWeReYqK2alDI5YzciATE0r2SkA6iMAtuXvNTMNACJLJEXNXaEy94ECuBe4Sk6RzRU80kjdbIo1Q==", + "dev": true, + "hasInstallScript": true, + "bin": { + "vue-demi-fix": "bin/vue-demi-fix.js", + "vue-demi-switch": "bin/vue-demi-switch.js" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "@vue/composition-api": "^1.0.0-rc.1", + "vue": "^3.0.0-0 || ^2.6.0" + }, + "peerDependenciesMeta": { + "@vue/composition-api": { + "optional": true + } + } + }, + "node_modules/@vueuse/metadata": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/@vueuse/metadata/-/metadata-10.10.0.tgz", + "integrity": "sha512-UNAo2sTCAW5ge6OErPEHb5z7NEAg3XcO9Cj7OK45aZXfLLH1QkexDcZD77HBi5zvEiLOm1An+p/4b5K3Worpug==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/@vueuse/shared": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/@vueuse/shared/-/shared-10.10.0.tgz", + "integrity": "sha512-2aW33Ac0Uk0U+9yo3Ypg9s5KcR42cuehRWl7vnUHadQyFvCktseyxxEPBi1Eiq4D2yBGACOnqLZpx1eMc7g5Og==", + "dev": true, + "dependencies": { + "vue-demi": ">=0.14.7" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/@vueuse/shared/node_modules/vue-demi": { + "version": "0.14.8", + "resolved": "https://registry.npmjs.org/vue-demi/-/vue-demi-0.14.8.tgz", + "integrity": "sha512-Uuqnk9YE9SsWeReYqK2alDI5YzciATE0r2SkA6iMAtuXvNTMNACJLJEXNXaEy94ECuBe4Sk6RzRU80kjdbIo1Q==", + "dev": true, + "hasInstallScript": true, + "bin": { + "vue-demi-fix": "bin/vue-demi-fix.js", + "vue-demi-switch": "bin/vue-demi-switch.js" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "@vue/composition-api": "^1.0.0-rc.1", + "vue": "^3.0.0-0 || ^2.6.0" + }, + "peerDependenciesMeta": { + "@vue/composition-api": { + "optional": true + } + } + }, + "node_modules/algoliasearch": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.23.3.tgz", + "integrity": "sha512-Le/3YgNvjW9zxIQMRhUHuhiUjAlKY/zsdZpfq4dlLqg6mEm0nL6yk+7f2hDOtLpxsgE4jSzDmvHL7nXdBp5feg==", + "dev": true, + "dependencies": { + "@algolia/cache-browser-local-storage": "4.23.3", + "@algolia/cache-common": "4.23.3", + "@algolia/cache-in-memory": "4.23.3", + "@algolia/client-account": "4.23.3", + "@algolia/client-analytics": "4.23.3", + "@algolia/client-common": "4.23.3", + "@algolia/client-personalization": "4.23.3", + "@algolia/client-search": "4.23.3", + "@algolia/logger-common": "4.23.3", + "@algolia/logger-console": "4.23.3", + "@algolia/recommend": "4.23.3", + 
"@algolia/requester-browser-xhr": "4.23.3", + "@algolia/requester-common": "4.23.3", + "@algolia/requester-node-http": "4.23.3", + "@algolia/transporter": "4.23.3" + } + }, + "node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/arg": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", + "dev": true + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/character-entities": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz", + "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/commander": { + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-12.0.0.tgz", + "integrity": 
"sha512-MwVNWlYjDTtOjX5PiD7o5pK0UrFU/OYgcJfjjK4RaHZETNtjJqrZa9Y9ds88+A+f+d5lv+561eZ+yCKoS3gbAA==", + "dev": true, + "engines": { + "node": ">=18" + } + }, + "node_modules/cose-base": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-1.0.3.tgz", + "integrity": "sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==", + "dev": true, + "dependencies": { + "layout-base": "^1.0.0" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/csstype": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", + "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", + "dev": true + }, + "node_modules/cytoscape": { + "version": "3.29.2", + "resolved": "https://registry.npmjs.org/cytoscape/-/cytoscape-3.29.2.tgz", + "integrity": "sha512-2G1ycU28Nh7OHT9rkXRLpCDP30MKH1dXJORZuBhtEhEW7pKwgPi77ImqlCWinouyE1PNepIOGZBOrE84DG7LyQ==", + "dev": true, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/cytoscape-cose-bilkent": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cytoscape-cose-bilkent/-/cytoscape-cose-bilkent-4.1.0.tgz", + "integrity": "sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==", + "dev": true, + "dependencies": { + "cose-base": "^1.0.0" + }, + "peerDependencies": { + "cytoscape": "^3.2.0" + } + }, + "node_modules/cytoscape-fcose": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cytoscape-fcose/-/cytoscape-fcose-2.2.0.tgz", + "integrity": "sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==", + "dev": true, + "optional": true, + "dependencies": { + "cose-base": "^2.2.0" + }, + "peerDependencies": { + "cytoscape": "^3.2.0" + } + }, + "node_modules/cytoscape-fcose/node_modules/cose-base": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-2.2.0.tgz", + "integrity": "sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==", + "dev": true, + "optional": true, + "dependencies": { + "layout-base": "^2.0.0" + } + }, + "node_modules/cytoscape-fcose/node_modules/layout-base": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-2.0.1.tgz", + "integrity": "sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==", + "dev": true, + "optional": true + }, + "node_modules/d3": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/d3/-/d3-7.9.0.tgz", + "integrity": "sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==", + "dev": true, + "dependencies": { + "d3-array": "3", + "d3-axis": "3", + "d3-brush": "3", + "d3-chord": "3", + "d3-color": "3", + "d3-contour": "4", + "d3-delaunay": "6", + "d3-dispatch": "3", + "d3-drag": "3", + "d3-dsv": "3", + "d3-ease": "3", + "d3-fetch": "3", + "d3-force": "3", + "d3-format": "3", + "d3-geo": "3", + "d3-hierarchy": "3", + "d3-interpolate": "3", + "d3-path": "3", + "d3-polygon": "3", + "d3-quadtree": 
"3", + "d3-random": "3", + "d3-scale": "4", + "d3-scale-chromatic": "3", + "d3-selection": "3", + "d3-shape": "3", + "d3-time": "3", + "d3-time-format": "4", + "d3-timer": "3", + "d3-transition": "3", + "d3-zoom": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-array": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", + "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "dev": true, + "dependencies": { + "internmap": "1 - 2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-axis": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz", + "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-brush": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz", + "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==", + "dev": true, + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "3", + "d3-transition": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-chord": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz", + "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==", + "dev": true, + "dependencies": { + "d3-path": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-color": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", + "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-contour": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz", + "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==", + "dev": true, + "dependencies": { + "d3-array": "^3.2.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-delaunay": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.4.tgz", + "integrity": "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==", + "dev": true, + "dependencies": { + "delaunator": "5" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-dispatch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", + "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-drag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", + "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", + "dev": true, + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-selection": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-dsv": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz", + "integrity": 
"sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==", + "dev": true, + "dependencies": { + "commander": "7", + "iconv-lite": "0.6", + "rw": "1" + }, + "bin": { + "csv2json": "bin/dsv2json.js", + "csv2tsv": "bin/dsv2dsv.js", + "dsv2dsv": "bin/dsv2dsv.js", + "dsv2json": "bin/dsv2json.js", + "json2csv": "bin/json2dsv.js", + "json2dsv": "bin/json2dsv.js", + "json2tsv": "bin/json2dsv.js", + "tsv2csv": "bin/dsv2dsv.js", + "tsv2json": "bin/dsv2json.js" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-dsv/node_modules/commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "dev": true, + "engines": { + "node": ">= 10" + } + }, + "node_modules/d3-ease": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-fetch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz", + "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==", + "dev": true, + "dependencies": { + "d3-dsv": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-force": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz", + "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==", + "dev": true, + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-quadtree": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-format": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz", + "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-geo": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.1.tgz", + "integrity": "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==", + "dev": true, + "dependencies": { + "d3-array": "2.5.0 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-hierarchy": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", + "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-interpolate": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", + "dev": true, + "dependencies": { + "d3-color": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-path": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-polygon": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz", + "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-quadtree": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz", + "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-random": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz", + "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-sankey": { + "version": "0.12.3", + "resolved": "https://registry.npmjs.org/d3-sankey/-/d3-sankey-0.12.3.tgz", + "integrity": "sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==", + "dev": true, + "dependencies": { + "d3-array": "1 - 2", + "d3-shape": "^1.2.0" + } + }, + "node_modules/d3-sankey/node_modules/d3-array": { + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz", + "integrity": "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==", + "dev": true, + "dependencies": { + "internmap": "^1.0.0" + } + }, + "node_modules/d3-sankey/node_modules/d3-path": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-1.0.9.tgz", + "integrity": "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==", + "dev": true + }, + "node_modules/d3-sankey/node_modules/d3-shape": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-1.3.7.tgz", + "integrity": "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==", + "dev": true, + "dependencies": { + "d3-path": "1" + } + }, + "node_modules/d3-sankey/node_modules/internmap": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-1.0.1.tgz", + "integrity": "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==", + "dev": true + }, + "node_modules/d3-scale": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", + "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", + "dev": true, + "dependencies": { + "d3-array": "2.10.0 - 3", + "d3-format": "1 - 3", + "d3-interpolate": "1.2.0 - 3", + "d3-time": "2.1.1 - 3", + "d3-time-format": "2 - 4" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-scale-chromatic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", + "integrity": "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==", + "dev": true, + "dependencies": { + "d3-color": "1 - 3", + "d3-interpolate": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-selection": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", + "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", + "dev": true, + "engines": { + "node": ">=12" + } + }, 
+ "node_modules/d3-shape": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", + "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", + "dev": true, + "dependencies": { + "d3-path": "^3.1.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", + "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", + "dev": true, + "dependencies": { + "d3-array": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time-format": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", + "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", + "dev": true, + "dependencies": { + "d3-time": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-timer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-transition": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", + "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", + "dev": true, + "dependencies": { + "d3-color": "1 - 3", + "d3-dispatch": "1 - 3", + "d3-ease": "1 - 3", + "d3-interpolate": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "d3-selection": "2 - 3" + } + }, + "node_modules/d3-zoom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", + "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", + "dev": true, + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "2 - 3", + "d3-transition": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/dagre-d3-es": { + "version": "7.0.10", + "resolved": "https://registry.npmjs.org/dagre-d3-es/-/dagre-d3-es-7.0.10.tgz", + "integrity": "sha512-qTCQmEhcynucuaZgY5/+ti3X/rnszKZhEQH/ZdWdtP1tA/y3VoHJzcVrO9pjjJCNpigfscAtoUB5ONcd2wNn0A==", + "dev": true, + "dependencies": { + "d3": "^7.8.2", + "lodash-es": "^4.17.21" + } + }, + "node_modules/dayjs": { + "version": "1.11.11", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.11.tgz", + "integrity": "sha512-okzr3f11N6WuqYtZSvm+F776mB41wRZMhKP+hc34YdW+KmtYYK9iqvHSwo2k9FEH3fhGXvOPV6yz2IcSrfRUDg==", + "dev": true + }, + "node_modules/debug": { + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", + "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decode-named-character-reference": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz", + "integrity": 
"sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==", + "dev": true, + "dependencies": { + "character-entities": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "dev": true, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/delaunator": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz", + "integrity": "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==", + "dev": true, + "dependencies": { + "robust-predicates": "^3.0.2" + } + }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/diff": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/dompurify": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.5.tgz", + "integrity": "sha512-lwG+n5h8QNpxtyrJW/gJWckL+1/DQiYMX8f7t8Z2AZTPw1esVrqjI63i7Zc2Gz0aKzLVMYC1V1PL/ky+aY/NgA==", + "dev": true + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true + }, + "node_modules/elkjs": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/elkjs/-/elkjs-0.9.3.tgz", + "integrity": "sha512-f/ZeWvW/BCXbhGEf1Ujp29EASo/lk1FDnETgNKwJrsVvGZhUWCZyg3xLJjAsxfOmt8KjswHmI5EwCQcPMpOYhQ==", + "dev": true + }, + "node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true + }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", + "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "dev": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + 
"@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "dev": true + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fastq": { + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/focus-trap": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/focus-trap/-/focus-trap-7.5.4.tgz", + "integrity": "sha512-N7kHdlgsO/v+iD/dMoJKtsSqs5Dz/dXZVebRgJw23LDk+jMi/974zyiOYDziY2JPp8xivq9BmUGwIJMiuSBi7w==", + "dev": true, + "dependencies": { + "tabbable": "^6.2.0" + } + }, + "node_modules/foreground-child": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", + "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/get-stdin": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-9.0.0.tgz", + "integrity": "sha512-dVKBjfWisLAicarI2Sf+JuBE/DghV4UzNAVe9yhEJuzeREd3JhOTE9cUaJTeSa77fsbQUK3pcOpJfM59+VKZaA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob": { + "version": 
"10.3.16", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.16.tgz", + "integrity": "sha512-JDKXl1DiuuHJ6fVS2FXjownaavciiHNUU4mOvV/B793RLh05vZL1rcPnCSaOgv1hDT6RDlY7AB7ZUvFYAtPgAw==", + "dev": true, + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.1", + "minipass": "^7.0.4", + "path-scurry": "^1.11.0" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/hookable": { + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/hookable/-/hookable-5.5.3.tgz", + "integrity": "sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==", + "dev": true + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ignore": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz", + "integrity": "sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/ini": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz", + "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==", + "dev": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/internmap": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": 
"sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/jackspeak": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.0.tgz", + "integrity": "sha512-JVYhQnN59LVPFCEcVa2C3CrEKYacvjRfqIQl+h8oi91aLYQVWRYbxjPcv1bUiUy/kLmQaANrYfNMCO3kuEDHfw==", + "dev": true, + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsonc-parser": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.1.tgz", + "integrity": "sha512-AilxAyFOAcK5wA1+LeaySVBrHsGQvUFCDWXKpZjzaL0PqW+xfBOttn8GNtWKFWqneyMZj41MWF9Kl6iPWLwgOA==", + "dev": true + }, + "node_modules/jsonpointer": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-5.0.1.tgz", + "integrity": "sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/katex": { + "version": "0.16.10", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.10.tgz", + "integrity": "sha512-ZiqaC04tp2O5utMsl2TEZTXxa6WSC4yo0fv5ML++D3QZv/vx2Mct0mTlRx3O+uUkjfuAgOkzsCmq5MiUEsDDdA==", + "dev": true, + "funding": [ + "https://opencollective.com/katex", + "https://github.com/sponsors/katex" + ], + "dependencies": { + "commander": "^8.3.0" + }, + "bin": { + "katex": "cli.js" + } + }, + "node_modules/katex/node_modules/commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "dev": true, + "engines": { + "node": ">= 12" + } + }, + "node_modules/khroma": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/khroma/-/khroma-2.1.0.tgz", + "integrity": "sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==", + "dev": true + }, + "node_modules/kleur": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/layout-base": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-1.0.2.tgz", + "integrity": "sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==", + "dev": true + }, + "node_modules/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": 
"sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", + "dev": true, + "dependencies": { + "uc.micro": "^2.0.0" + } + }, + "node_modules/lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", + "dev": true + }, + "node_modules/lru-cache": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", + "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", + "dev": true, + "engines": { + "node": "14 || >=16.14" + } + }, + "node_modules/magic-string": { + "version": "0.30.10", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.10.tgz", + "integrity": "sha512-iIRwTIf0QKV3UAnYK4PU8uiEc4SRh5jX0mwpIwETPpHdhVM4f53RSwS/vXvN1JhGX+Cs7B8qIq3d6AH49O5fAQ==", + "dev": true, + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.4.15" + } + }, + "node_modules/mark.js": { + "version": "8.11.1", + "resolved": "https://registry.npmjs.org/mark.js/-/mark.js-8.11.1.tgz", + "integrity": "sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ==", + "dev": true + }, + "node_modules/markdown-it": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", + "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1", + "entities": "^4.4.0", + "linkify-it": "^5.0.0", + "mdurl": "^2.0.0", + "punycode.js": "^2.3.1", + "uc.micro": "^2.1.0" + }, + "bin": { + "markdown-it": "bin/markdown-it.mjs" + } + }, + "node_modules/markdown-it-footnote": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/markdown-it-footnote/-/markdown-it-footnote-4.0.0.tgz", + "integrity": "sha512-WYJ7urf+khJYl3DqofQpYfEYkZKbmXmwxQV8c8mO/hGIhgZ1wOe7R4HLFNwqx7TjILbnC98fuyeSsin19JdFcQ==", + "dev": true + }, + "node_modules/markdownlint": { + "version": "0.34.0", + "resolved": "https://registry.npmjs.org/markdownlint/-/markdownlint-0.34.0.tgz", + "integrity": "sha512-qwGyuyKwjkEMOJ10XN6OTKNOVYvOIi35RNvDLNxTof5s8UmyGHlCdpngRHoRGNvQVGuxO3BJ7uNSgdeX166WXw==", + "dev": true, + "dependencies": { + "markdown-it": "14.1.0", + "markdownlint-micromark": "0.1.9" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/DavidAnson" + } + }, + "node_modules/markdownlint-cli": { + "version": "0.40.0", + "resolved": "https://registry.npmjs.org/markdownlint-cli/-/markdownlint-cli-0.40.0.tgz", + "integrity": "sha512-JXhI3dRQcaqwiFYpPz6VJ7aKYheD53GmTz9y4D/d0F1MbZDGOp9pqKlbOfUX/pHP/iAoeiE4wYRmk8/kjLakxA==", + "dev": true, + "dependencies": { + "commander": "~12.0.0", + "get-stdin": "~9.0.0", + "glob": "~10.3.12", + "ignore": "~5.3.1", + "js-yaml": "^4.1.0", + "jsonc-parser": "~3.2.1", + "jsonpointer": "5.0.1", + "markdownlint": "~0.34.0", + "minimatch": "~9.0.4", + "run-con": "~1.3.2", + "toml": "~3.0.0" + }, + "bin": { + "markdownlint": "markdownlint.js" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/markdownlint-micromark": { + "version": "0.1.9", + "resolved": "https://registry.npmjs.org/markdownlint-micromark/-/markdownlint-micromark-0.1.9.tgz", + "integrity": "sha512-5hVs/DzAFa8XqYosbEAEg6ok6MF2smDj89ztn9pKkCtdKHVdPQuGMH7frFfYL9mLkvfFe4pTyAMffLbjf3/EyA==", + "dev": 
true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/DavidAnson" + } + }, + "node_modules/mdast-util-from-markdown": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.3.1.tgz", + "integrity": "sha512-4xTO/M8c82qBcnQc1tgpNtubGUW/Y1tBQ1B0i5CtSoelOLKFYlElIr3bvgREYYO5iRqbMY1YuqZng0GVOI8Qww==", + "dev": true, + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "decode-named-character-reference": "^1.0.0", + "mdast-util-to-string": "^3.1.0", + "micromark": "^3.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-decode-string": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "unist-util-stringify-position": "^3.0.0", + "uvu": "^0.5.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "dev": true, + "dependencies": { + "@types/mdast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==", + "dev": true + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/mermaid": { + "version": "10.9.3", + "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.9.3.tgz", + "integrity": "sha512-V80X1isSEvAewIL3xhmz/rVmc27CVljcsbWxkxlWJWY/1kQa4XOABqpDl2qQLGKzpKm6WbTfUEKImBlUfFYArw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@braintree/sanitize-url": "^6.0.1", + "@types/d3-scale": "^4.0.3", + "@types/d3-scale-chromatic": "^3.0.0", + "cytoscape": "^3.28.1", + "cytoscape-cose-bilkent": "^4.1.0", + "d3": "^7.4.0", + "d3-sankey": "^0.12.3", + "dagre-d3-es": "7.0.10", + "dayjs": "^1.11.7", + "dompurify": "^3.0.5 <3.1.7", + "elkjs": "^0.9.0", + "katex": "^0.16.9", + "khroma": "^2.0.0", + "lodash-es": "^4.17.21", + "mdast-util-from-markdown": "^1.3.0", + "non-layered-tidy-tree-layout": "^2.0.2", + "stylis": "^4.1.3", + "ts-dedent": "^2.2.0", + "uuid": "^9.0.0", + "web-worker": "^1.2.0" + } + }, + "node_modules/micromark": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-3.2.0.tgz", + "integrity": "sha512-uD66tJj54JLYq0De10AhWycZWGQNUvDI55xPgk2sQM5kn1JYlhbCMTtEeT27+vAhW2FBQxLlOmS3pmA7/2z4aA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "micromark-core-commonmark": "^1.0.1", + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + 
"micromark-util-chunked": "^1.0.0", + "micromark-util-combine-extensions": "^1.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-sanitize-uri": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "uvu": "^0.5.0" + } + }, + "node_modules/micromark-core-commonmark": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-1.1.0.tgz", + "integrity": "sha512-BgHO1aRbolh2hcrzL2d1La37V0Aoz73ymF8rAcKnohLy93titmv62E0gP8Hrx9PKcKrqCZ1BbLGbP3bEhoXYlw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "micromark-factory-destination": "^1.0.0", + "micromark-factory-label": "^1.0.0", + "micromark-factory-space": "^1.0.0", + "micromark-factory-title": "^1.0.0", + "micromark-factory-whitespace": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-classify-character": "^1.0.0", + "micromark-util-html-tag-name": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "uvu": "^0.5.0" + } + }, + "node_modules/micromark-factory-destination": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.1.0.tgz", + "integrity": "sha512-XaNDROBgx9SgSChd69pjiGKbV+nfHGDPVYFs5dOoDd7ZnMAE+Cuu91BCpsY8RT2NP9vo/B8pds2VQNCLiu0zhg==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-factory-label": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-1.1.0.tgz", + "integrity": "sha512-OLtyez4vZo/1NjxGhcpDSbHQ+m0IIGnT8BoPamh+7jVlzLJBH98zzuCoUeMxvM6WsNeh8wx8cKvqLiPHEACn0w==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0" + } + }, + "node_modules/micromark-factory-space": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-1.1.0.tgz", + "integrity": "sha512-cRzEj7c0OL4Mw2v6nwzttyOZe8XY/Z8G0rzmWQZTBi/jjwyw/U4uqKtUORXQrR5bAZZnbTI/feRV/R7hc4jQYQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + 
"node_modules/micromark-factory-title": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-1.1.0.tgz", + "integrity": "sha512-J7n9R3vMmgjDOCY8NPw55jiyaQnH5kBdV2/UXCtZIpnHH3P6nHUKaH7XXEYuWwx/xUJcawa8plLBEjMPU24HzQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-factory-whitespace": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-1.1.0.tgz", + "integrity": "sha512-v2WlmiymVSp5oMg+1Q0N1Lxmt6pMhIHD457whWM7/GUlEks1hI9xj5w3zbc4uuMKXGisksZk8DzP2UyGbGqNsQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-character": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-1.2.0.tgz", + "integrity": "sha512-lXraTwcX3yH/vMDaFWCQJP1uIszLVebzUa3ZHdrgxr7KEU/9mL4mVgCpGbyhvNLNlauROiNUq7WN5u7ndbY6xg==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-chunked": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-1.1.0.tgz", + "integrity": "sha512-Ye01HXpkZPNcV6FiyoW2fGZDUw4Yc7vT0E9Sad83+bEDiCJ1uXu0S3mr8WLpsz3HaG3x2q0HM6CTuPdcZcluFQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-classify-character": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-1.1.0.tgz", + "integrity": "sha512-SL0wLxtKSnklKSUplok1WQFoGhUdWYKggKUiqhX+Swala+BtptGCu5iPRc+xvzJ4PXE/hwM3FNXsfEVgoZsWbw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-combine-extensions": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-1.1.0.tgz", + "integrity": "sha512-Q20sp4mfNf9yEqDL50WwuWZHUrCO4fEyeDCnMGmG5Pr0Cz15Uo7KBs6jq+dq0EgX4DPwwrh9m0X+zPV1ypFvUA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": 
"https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-chunked": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-decode-numeric-character-reference": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-1.1.0.tgz", + "integrity": "sha512-m9V0ExGv0jB1OT21mrWcuf4QhP46pH1KkfWy9ZEezqHKAxkj4mPCy3nIH1rkbdMlChLHX531eOrymlwyZIf2iw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-decode-string": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-1.1.0.tgz", + "integrity": "sha512-YphLGCK8gM1tG1bd54azwyrQRjCFcmgj2S2GoJDNnh4vYtnL38JS8M4gpxzOPNyHdNEpheyWXCTnnTDY3N+NVQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-encode": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-1.1.0.tgz", + "integrity": "sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-html-tag-name": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.2.0.tgz", + "integrity": "sha512-VTQzcuQgFUD7yYztuQFKXT49KghjtETQ+Wv/zUjGSGBioZnkA4P1XXZPT1FHeJA6RwRXSF47yvJ1tsJdoxwO+Q==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-normalize-identifier": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-1.1.0.tgz", + "integrity": "sha512-N+w5vhqrBihhjdpM8+5Xsxy71QWqGn7HYNUvch71iV2PM7+E3uWGox1Qp90loa1ephtCxG2ftRV/Conitc6P2Q==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-resolve-all": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-1.1.0.tgz", + "integrity": "sha512-b/G6BTMSg+bX+xVCshPTPyAu2tmA0E4X98NSR7eIbeC6ycCqCeE7wjfDIgzEbkzdEVJXRtOG4FbEm/uGbCRouA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": 
"https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-sanitize-uri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-1.2.0.tgz", + "integrity": "sha512-QO4GXv0XZfWey4pYFndLUKEAktKkG5kZTdUNaTAkzbuJxn2tNBOr+QtxR2XpWaMhbImT2dPzyLrPXLlPhph34A==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-subtokenize": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-1.1.0.tgz", + "integrity": "sha512-kUQHyzRoxvZO2PuLzMt2P/dwVsTiivCK8icYTeR+3WgbuPqfHgPPy7nFKbeqRivBvn/3N3GBiNC+JRTMSxEC7A==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-chunked": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0" + } + }, + "node_modules/micromark-util-symbol": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-1.1.0.tgz", + "integrity": "sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-types": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-1.1.0.tgz", + "integrity": "sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/minimatch": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/minisearch": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/minisearch/-/minisearch-6.3.0.tgz", + "integrity": "sha512-ihFnidEeU8iXzcVHy74dhkxh/dn8Dc08ERl0xwoMMGqp4+LvRSCgicb+zGqWthVokQKvCSxITlh3P08OzdTYCQ==", + "dev": true + }, + "node_modules/mitt": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mitt/-/mitt-3.0.1.tgz", + "integrity": "sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==", + "dev": true + }, + "node_modules/mri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", + "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/nanoid": { + "version": "3.3.7", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", + "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/non-layered-tidy-tree-layout": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/non-layered-tidy-tree-layout/-/non-layered-tidy-tree-layout-2.0.2.tgz", + "integrity": "sha512-gkXMxRzUH+PB0ax9dUN0yYF0S25BqeAYqhgMaLUFmpXLEk7Fcu8f4emJuOAY0V8kjDICxROIKsTAKsV/v355xw==", + "dev": true + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/perfect-debounce": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-1.0.0.tgz", + "integrity": "sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==", + "dev": true + }, + "node_modules/picocolors": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==", + "dev": true + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": 
"https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/playwright": { + "version": "1.44.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.44.1.tgz", + "integrity": "sha512-qr/0UJ5CFAtloI3avF95Y0L1xQo6r3LQArLIg/z/PoGJ6xa+EwzrwO5lpNr/09STxdHuUoP2mvuELJS+hLdtgg==", + "dev": true, + "dependencies": { + "playwright-core": "1.44.1" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=16" + }, + "optionalDependencies": { + "fsevents": "2.3.2" + } + }, + "node_modules/playwright-core": { + "version": "1.44.1", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.44.1.tgz", + "integrity": "sha512-wh0JWtYTrhv1+OSsLPgFzGzt67Y7BE/ZS3jEqgGBlp2ppp1ZDj8c+9IARNW4dwf1poq5MgHreEM2KV/GuR4cFA==", + "dev": true, + "bin": { + "playwright-core": "cli.js" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/postcss": { + "version": "8.4.47", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.47.tgz", + "integrity": "sha512-56rxCq7G/XfB4EkXq9Egn5GCqugWvDFjafDOThIdMBsI15iqPqR5r15TfSr1YPYeEI19YeaXMCbY6u88Y76GLQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "nanoid": "^3.3.7", + "picocolors": "^1.1.0", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/preact": { + "version": "10.22.0", + "resolved": "https://registry.npmjs.org/preact/-/preact-10.22.0.tgz", + "integrity": "sha512-RRurnSjJPj4rp5K6XoP45Ui33ncb7e4H7WiOHVpjbkvqvA3U+N8Z6Qbo0AE6leGYBV66n8EhEaFixvIu3SkxFw==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/preact" + } + }, + "node_modules/punycode.js": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", + "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rfdc": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.1.tgz", + "integrity": "sha512-r5a3l5HzYlIC68TpmYKlxWjmOP6wiPJ1vWv2HeLhNsRZMrCkxeqxiHlQ21oXmQ4F3SiryXBHhAD7JZqvOJjFmg==", + "dev": true + }, + 
"node_modules/robust-predicates": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz", + "integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==", + "dev": true + }, + "node_modules/rollup": { + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.22.4.tgz", + "integrity": "sha512-vD8HJ5raRcWOyymsR6Z3o6+RzfEPCnVLMFJ6vRslO1jt4LO6dUo5Qnpg7y4RkZFM2DMe3WUirkI5c16onjrc6A==", + "dev": true, + "dependencies": { + "@types/estree": "1.0.5" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.22.4", + "@rollup/rollup-android-arm64": "4.22.4", + "@rollup/rollup-darwin-arm64": "4.22.4", + "@rollup/rollup-darwin-x64": "4.22.4", + "@rollup/rollup-linux-arm-gnueabihf": "4.22.4", + "@rollup/rollup-linux-arm-musleabihf": "4.22.4", + "@rollup/rollup-linux-arm64-gnu": "4.22.4", + "@rollup/rollup-linux-arm64-musl": "4.22.4", + "@rollup/rollup-linux-powerpc64le-gnu": "4.22.4", + "@rollup/rollup-linux-riscv64-gnu": "4.22.4", + "@rollup/rollup-linux-s390x-gnu": "4.22.4", + "@rollup/rollup-linux-x64-gnu": "4.22.4", + "@rollup/rollup-linux-x64-musl": "4.22.4", + "@rollup/rollup-win32-arm64-msvc": "4.22.4", + "@rollup/rollup-win32-ia32-msvc": "4.22.4", + "@rollup/rollup-win32-x64-msvc": "4.22.4", + "fsevents": "~2.3.2" + } + }, + "node_modules/run-con": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/run-con/-/run-con-1.3.2.tgz", + "integrity": "sha512-CcfE+mYiTcKEzg0IqS08+efdnH0oJ3zV0wSUFBNrMHMuxCtXvBCLzCJHatwuXDcu/RlhjTziTo/a1ruQik6/Yg==", + "dev": true, + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~4.1.0", + "minimist": "^1.2.8", + "strip-json-comments": "~3.1.1" + }, + "bin": { + "run-con": "cli.js" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/rw": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz", + "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==", + "dev": true + }, + "node_modules/sade": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz", + "integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==", + "dev": true, + "dependencies": { + "mri": "^1.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true + }, + "node_modules/sax": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.4.1.tgz", + "integrity": "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg==", + "dev": true + 
}, + "node_modules/search-insights": { + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/search-insights/-/search-insights-2.14.0.tgz", + "integrity": "sha512-OLN6MsPMCghDOqlCtsIsYgtsC0pnwVTyT9Mu6A3ewOj1DxvzZF6COrn2g86E/c05xbktB0XN04m/t1Z+n+fTGw==", + "dev": true, + "peer": true + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/shiki": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/shiki/-/shiki-1.6.3.tgz", + "integrity": "sha512-lE1/YGlzFY0hQSyEfsZj18xGrTWxyhFQkaiILALqTBZPbJeYFWpbUhlmTGPOupYB/qC+H6sV4UznJzcEh3WMHQ==", + "dev": true, + "dependencies": { + "@shikijs/core": "1.6.3" + } + }, + "node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/sitemap": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/sitemap/-/sitemap-8.0.0.tgz", + "integrity": "sha512-+AbdxhM9kJsHtruUF39bwS/B0Fytw6Fr1o4ZAIAEqA6cke2xcoO2GleBw9Zw7nRzILVEgz7zBM5GiTJjie1G9A==", + "dev": true, + "dependencies": { + "@types/node": "^17.0.5", + "@types/sax": "^1.2.1", + "arg": "^5.0.0", + "sax": "^1.2.4" + }, + "bin": { + "sitemap": "dist/cli.js" + }, + "engines": { + "node": ">=14.0.0", + "npm": ">=6.0.0" + } + }, + "node_modules/sitemap-ts": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/sitemap-ts/-/sitemap-ts-1.7.3.tgz", + "integrity": "sha512-xeCbwZvYQfZeKyaCGsPnBZgmLjKGa4BQfVY5O/JHhd0LEvoJM4PIvYBN8zyIl97q+gtxmDaNMclVRxT6FGxyHQ==", + "dev": true, + "dependencies": { + "@antfu/utils": "^0.7.8", + "fast-glob": "^3.3.2", + "sitemap": "^8.0.0", + "xml-formatter": "^3.6.2" + } + }, + "node_modules/sitemap/node_modules/@types/node": { + "version": "17.0.45", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz", + "integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==", + "dev": true + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/speakingurl": { + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/speakingurl/-/speakingurl-14.0.1.tgz", + "integrity": "sha512-1POYv7uv2gXoyGFpBCmpDVSNV74IfsWlDW216UPjbWufNf+bSU6GdbDsxdcxtfwb4xlI3yxzOTKClUosxARYrQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/string-width": { + "version": "5.1.2", + "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/string-width-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/stylis": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.2.tgz", + "integrity": 
"sha512-bhtUjWd/z6ltJiQwg0dUfxEJ+W+jdqQd8TbWLWyeIJHlnsqmGLRFFd8e5mA0AZi/zx90smXRlN66YMTcaSFifg==", + "dev": true + }, + "node_modules/tabbable": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/tabbable/-/tabbable-6.2.0.tgz", + "integrity": "sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew==", + "dev": true + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toml": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/toml/-/toml-3.0.0.tgz", + "integrity": "sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==", + "dev": true + }, + "node_modules/ts-dedent": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz", + "integrity": "sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==", + "dev": true, + "engines": { + "node": ">=6.10" + } + }, + "node_modules/uc.micro": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==", + "dev": true + }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "dev": true + }, + "node_modules/unist-util-stringify-position": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.3.tgz", + "integrity": "sha512-k5GzIBZ/QatR8N5X2y+drfpWG8IDBzdnVj6OInRNWm1oXrzydiaAT2OQiA8DPRRZyAKb9b6I2a6PxYklZD0gKg==", + "dev": true, + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "dev": true, + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/uvu": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/uvu/-/uvu-0.5.6.tgz", + "integrity": "sha512-+g8ENReyr8YsOc6fv/NVJs2vFdHBnBNdfE49rshrTzDWOlUx4Gq7KOS2GD8eqhy2j+Ejq29+SbKH8yjkAqXqoA==", + "dev": true, + "dependencies": { + "dequal": "^2.0.0", + "diff": "^5.0.0", + "kleur": "^4.0.3", + "sade": "^1.7.3" + }, + "bin": { + "uvu": "bin.js" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/vite": { + "version": "5.4.8", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.8.tgz", + "integrity": "sha512-FqrItQ4DT1NC4zCUqMB4c4AZORMKIa0m8/URVCZ77OZ/QSNeJ54bU1vrFADbDsuwfIPcgknRkmqakQcgnL4GiQ==", + "dev": true, + "dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": 
"https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/vite/node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/vitepress": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/vitepress/-/vitepress-1.2.3.tgz", + "integrity": "sha512-GvEsrEeNLiDE1+fuwDAYJCYLNZDAna+EtnXlPajhv/MYeTjbNK6Bvyg6NoTdO1sbwuQJ0vuJR99bOlH53bo6lg==", + "dev": true, + "dependencies": { + "@docsearch/css": "^3.6.0", + "@docsearch/js": "^3.6.0", + "@shikijs/core": "^1.6.2", + "@shikijs/transformers": "^1.6.2", + "@types/markdown-it": "^14.1.1", + "@vitejs/plugin-vue": "^5.0.5", + "@vue/devtools-api": "^7.2.1", + "@vue/shared": "^3.4.27", + "@vueuse/core": "^10.10.0", + "@vueuse/integrations": "^10.10.0", + "focus-trap": "^7.5.4", + "mark.js": "8.11.1", + "minisearch": "^6.3.0", + "shiki": "^1.6.2", + "vite": "^5.2.12", + "vue": "^3.4.27" + }, + "bin": { + "vitepress": "bin/vitepress.js" + }, + "peerDependencies": { + "markdown-it-mathjax3": "^4", + "postcss": "^8" + }, + "peerDependenciesMeta": { + "markdown-it-mathjax3": { + "optional": true + }, + "postcss": { + "optional": true + } + } + }, + "node_modules/vitepress-plugin-mermaid": { + "version": "2.0.16", + "resolved": "https://registry.npmjs.org/vitepress-plugin-mermaid/-/vitepress-plugin-mermaid-2.0.16.tgz", + "integrity": "sha512-sW0Eu4+1EzRdwZBMGjzwKDsbQiuJIxCy8BlMw7Ur88p9fXalrFYKqZ3wYWLxsFTBipeooFIeanef/xw1P+v7vQ==", + "dev": true, + "optionalDependencies": { + "@mermaid-js/mermaid-mindmap": "^9.3.0" + }, + "peerDependencies": { + "mermaid": "10", + "vitepress": "^1.0.0 || ^1.0.0-alpha" + } + }, + "node_modules/vue": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/vue/-/vue-3.4.27.tgz", + "integrity": "sha512-8s/56uK6r01r1icG/aEOHqyMVxd1bkYcSe9j8HcKtr/xTOFWvnzIVTehNW+5Yt89f+DLBe4A569pnZLS5HzAMA==", + "dev": true, + "dependencies": { + "@vue/compiler-dom": "3.4.27", + "@vue/compiler-sfc": "3.4.27", + "@vue/runtime-dom": "3.4.27", + "@vue/server-renderer": "3.4.27", + "@vue/shared": "3.4.27" + }, + "peerDependencies": { + "typescript": "*" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/web-worker": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/web-worker/-/web-worker-1.3.0.tgz", + "integrity": "sha512-BSR9wyRsy/KOValMgd5kMyr3JzpdeoR9KVId8u5GVlTTAtNChlsE4yTxeY7zMdNSyOmoKBv8NH2qeRY9Tg+IaA==", + "dev": true + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": 
"sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/xml-formatter": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/xml-formatter/-/xml-formatter-3.6.2.tgz", + "integrity": "sha512-enWhevZNOwffZFUhzl1WMcha8lFLZUgJ7NzFs5Ug4ZOFCoNheGYXz1J9Iz/e+cTn9rCkuT1GwTacz+YlmFHOGw==", + "dev": true, + "dependencies": { + "xml-parser-xo": "^4.1.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/xml-parser-xo": { + "version": 
"4.1.1", + "resolved": "https://registry.npmjs.org/xml-parser-xo/-/xml-parser-xo-4.1.1.tgz", + "integrity": "sha512-Ggf2y90+Y6e9IK5hoPuembVHJ03PhDSdhldEmgzbihzu9k0XBo0sfcFxaSi4W1PlUSSI1ok+MJ0JCXUn+U4Ilw==", + "dev": true, + "engines": { + "node": ">= 14" + } + } + } +} diff --git a/package.json b/package.json index a155786a1a..bdd8fe208c 100644 --- a/package.json +++ b/package.json @@ -1,16 +1,23 @@ { "license": "Apache-2.0", + "type": "module", "devDependencies": { - "@vuepress/client": "2.0.0-rc.0", - "@vuepress/plugin-docsearch": "2.0.0-rc.0", - "@vuepress/plugin-google-analytics": "2.0.0-rc.0", - "@vuepress/utils": "2.0.0-rc.0", - "vue": "^3.3.11", - "vuepress": "2.0.0-rc.0", - "vuepress-plugin-sitemap2": "2.0.0-rc.4" + "@playwright/test": "^1.44.0", + "@types/node": "^20.12.12", + "@vueuse/core": "^10.9.0", + "markdown-it-footnote": "^4.0.0", + "markdownlint-cli": "^0.40.0", + "mermaid": "^10.9.1", + "sitemap-ts": "^1.7.3", + "vitepress": "^1.2.3", + "vitepress-plugin-mermaid": "^2.0.16", + "vue": "^3.4.27" }, "scripts": { - "website:dev": "vuepress dev runatlantis.io", - "website:build": "NODE_OPTIONS=--openssl-legacy-provider vuepress build runatlantis.io" + "website:dev": "vitepress dev --host localhost --port 8080 runatlantis.io", + "website:lint": "markdownlint runatlantis.io", + "website:lint-fix": "markdownlint --fix runatlantis.io", + "website:build": "vitepress build runatlantis.io", + "e2e": "playwright test" } } diff --git a/playwright.config.cjs b/playwright.config.cjs new file mode 100644 index 0000000000..e3411787bd --- /dev/null +++ b/playwright.config.cjs @@ -0,0 +1,3 @@ +module.exports = { + testDir: './runatlantis.io/e2e' +}; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 51eeb5c138..75965cc606 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1,214 +1,159 @@ -lockfileVersion: '6.0' +lockfileVersion: '9.0' settings: autoInstallPeers: true excludeLinksFromLockfile: false -devDependencies: - '@vuepress/client': - specifier: 2.0.0-rc.0 - version: 2.0.0-rc.0 - '@vuepress/plugin-docsearch': - specifier: 2.0.0-rc.0 - version: 2.0.0-rc.0(@algolia/client-search@4.21.1)(search-insights@2.13.0) - '@vuepress/plugin-google-analytics': - specifier: 2.0.0-rc.0 - version: 2.0.0-rc.0 - '@vuepress/utils': - specifier: 2.0.0-rc.0 - version: 2.0.0-rc.0 - vue: - specifier: ^3.3.11 - version: 3.3.11 - vuepress: - specifier: 2.0.0-rc.0 - version: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11) - vuepress-plugin-sitemap2: - specifier: 2.0.0-rc.4 - version: 2.0.0-rc.4(vuepress@2.0.0-rc.0) +importers: + + .: + devDependencies: + '@playwright/test': + specifier: ^1.44.0 + version: 1.44.0 + '@types/node': + specifier: ^20.12.12 + version: 20.12.12 + '@vueuse/core': + specifier: ^10.9.0 + version: 10.9.0(vue@3.4.27) + markdown-it-footnote: + specifier: ^4.0.0 + version: 4.0.0 + markdownlint-cli: + specifier: ^0.40.0 + version: 0.40.0 + mermaid: + specifier: ^10.9.3 + version: 10.9.3 + sitemap-ts: + specifier: ^1.7.3 + version: 1.8.0 + vitepress: + specifier: ^1.2.3 + version: 1.3.4(@algolia/client-search@4.23.3)(@types/node@20.12.12)(postcss@8.4.47)(sass@1.77.2)(search-insights@2.13.0) + vitepress-plugin-mermaid: + specifier: ^2.0.16 + version: 2.0.16(mermaid@10.9.3)(vitepress@1.3.4(@algolia/client-search@4.23.3)(@types/node@20.12.12)(postcss@8.4.47)(sass@1.77.2)(search-insights@2.13.0)) + vue: + specifier: ^3.4.27 + version: 3.4.27 packages: - /@algolia/autocomplete-core@1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)(search-insights@2.13.0): + 
'@algolia/autocomplete-core@1.9.3': resolution: {integrity: sha512-009HdfugtGCdC4JdXUbVJClA0q0zh24yyePn+KUGk3rP7j8FEe/m5Yo/z65gn6nP/cM39PxpzqKrL7A6fP6PPw==} - dependencies: - '@algolia/autocomplete-plugin-algolia-insights': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)(search-insights@2.13.0) - '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1) - transitivePeerDependencies: - - '@algolia/client-search' - - algoliasearch - - search-insights - dev: true - /@algolia/autocomplete-plugin-algolia-insights@1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)(search-insights@2.13.0): + '@algolia/autocomplete-plugin-algolia-insights@1.9.3': resolution: {integrity: sha512-a/yTUkcO/Vyy+JffmAnTWbr4/90cLzw+CC3bRbhnULr/EM0fGNvM13oQQ14f2moLMcVDyAx/leczLlAOovhSZg==} peerDependencies: search-insights: '>= 1 < 3' - dependencies: - '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1) - search-insights: 2.13.0 - transitivePeerDependencies: - - '@algolia/client-search' - - algoliasearch - dev: true - /@algolia/autocomplete-preset-algolia@1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1): + '@algolia/autocomplete-preset-algolia@1.9.3': resolution: {integrity: sha512-d4qlt6YmrLMYy95n5TB52wtNDr6EgAIPH81dvvvW8UmuWRgxEtY0NJiPwl/h95JtG2vmRM804M0DSwMCNZlzRA==} peerDependencies: '@algolia/client-search': '>= 4.9.1 < 6' algoliasearch: '>= 4.9.1 < 6' - dependencies: - '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1) - '@algolia/client-search': 4.21.1 - algoliasearch: 4.21.1 - dev: true - /@algolia/autocomplete-shared@1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1): + '@algolia/autocomplete-shared@1.9.3': resolution: {integrity: sha512-Wnm9E4Ye6Rl6sTTqjoymD+l8DjSTHsHboVRYrKgEt8Q7UHm9nYbqhN/i0fhUYA3OAEH7WA8x3jfpnmJm3rKvaQ==} peerDependencies: '@algolia/client-search': '>= 4.9.1 < 6' algoliasearch: '>= 4.9.1 < 6' - dependencies: - '@algolia/client-search': 4.21.1 - algoliasearch: 4.21.1 - dev: true - /@algolia/cache-browser-local-storage@4.21.1: - resolution: {integrity: sha512-vUkac/vgj8inyGR/IgunRjTOQ6IlBwl7afFkIfUZRqbqKKXBs+A/g5wgH+UnAlCSW8wjFRAIfCzuvSRb1/qjsQ==} - dependencies: - '@algolia/cache-common': 4.21.1 - dev: true + '@algolia/cache-browser-local-storage@4.23.3': + resolution: {integrity: sha512-vRHXYCpPlTDE7i6UOy2xE03zHF2C8MEFjPN2v7fRbqVpcOvAUQK81x3Kc21xyb5aSIpYCjWCZbYZuz8Glyzyyg==} - /@algolia/cache-common@4.21.1: - resolution: {integrity: sha512-HUo4fRk8KXFMyCASW0k+Kl8iXBoRPdqAjV9OVaFibTNg1dbwnpe6eIxbSTM6AJ2X82ic/8x3GuAO8zF/E515PA==} - dev: true + '@algolia/cache-common@4.23.3': + resolution: {integrity: sha512-h9XcNI6lxYStaw32pHpB1TMm0RuxphF+Ik4o7tcQiodEdpKK+wKufY6QXtba7t3k8eseirEMVB83uFFF3Nu54A==} - /@algolia/cache-in-memory@4.21.1: - resolution: {integrity: sha512-+l2pLg6yIwRaGNtv41pGF/f/e9Qk80FeYE41f4OXS9lb5vpyrxzqM5nUaffWk/ZSFrPDuw5J2E226c//tIIffA==} - dependencies: - '@algolia/cache-common': 4.21.1 - dev: true + '@algolia/cache-in-memory@4.23.3': + resolution: {integrity: sha512-yvpbuUXg/+0rbcagxNT7un0eo3czx2Uf0y4eiR4z4SD7SiptwYTpbuS0IHxcLHG3lq22ukx1T6Kjtk/rT+mqNg==} - /@algolia/client-account@4.21.1: - resolution: {integrity: sha512-AC6SjA9n38th73gAUqcjsuxNUChpwaflaAhPL0qO9cUICN67njpQrnYaoSVZ/yx0opG5zQFRKbpEcuPGj0XjhQ==} - dependencies: - '@algolia/client-common': 4.21.1 - '@algolia/client-search': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true + '@algolia/client-account@4.23.3': + resolution: {integrity: 
sha512-hpa6S5d7iQmretHHF40QGq6hz0anWEHGlULcTIT9tbUssWUriN9AUXIFQ8Ei4w9azD0hc1rUok9/DeQQobhQMA==} - /@algolia/client-analytics@4.21.1: - resolution: {integrity: sha512-q6AxvAcBl4fNZXZsMwRRQXcsxUv0PK5eUAz/lHDvgkMWAg6cP7Fl+WIq0fHcG7cJA4EHf2sT5fV6Z+yUlf7NfA==} - dependencies: - '@algolia/client-common': 4.21.1 - '@algolia/client-search': 4.21.1 - '@algolia/requester-common': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true + '@algolia/client-analytics@4.23.3': + resolution: {integrity: sha512-LBsEARGS9cj8VkTAVEZphjxTjMVCci+zIIiRhpFun9jGDUlS1XmhCW7CTrnaWeIuCQS/2iPyRqSy1nXPjcBLRA==} - /@algolia/client-common@4.21.1: - resolution: {integrity: sha512-LOH7ncYwY/x7epOgxc/MIuV7m3qzl00wIjDG5/9rgImFpkV0X+D/ndJI9DmPsIx7yaTLd5xv/XYuKLcvrUR0eQ==} - dependencies: - '@algolia/requester-common': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true + '@algolia/client-common@4.23.3': + resolution: {integrity: sha512-l6EiPxdAlg8CYhroqS5ybfIczsGUIAC47slLPOMDeKSVXYG1n0qGiz4RjAHLw2aD0xzh2EXZ7aRguPfz7UKDKw==} - /@algolia/client-personalization@4.21.1: - resolution: {integrity: sha512-u2CyQjHbyVwPqM5eSXd/o+rh1Pk949P/MO6s+OxyEGg6/R2YpYvmsafVZl9Q+xqT8pFaf5QygfcqlSdMUDHV5Q==} - dependencies: - '@algolia/client-common': 4.21.1 - '@algolia/requester-common': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true + '@algolia/client-personalization@4.23.3': + resolution: {integrity: sha512-3E3yF3Ocr1tB/xOZiuC3doHQBQ2zu2MPTYZ0d4lpfWads2WTKG7ZzmGnsHmm63RflvDeLK/UVx7j2b3QuwKQ2g==} - /@algolia/client-search@4.21.1: - resolution: {integrity: sha512-3KqSmMkQmF+ACY/Ms5TdcvrcK8iqgQP/N0EPnNUUP4LMUzAACpLLTdzA+AtCuc6oaz5ITtGJBVdPUljj5Jf/Lg==} - dependencies: - '@algolia/client-common': 4.21.1 - '@algolia/requester-common': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true + '@algolia/client-search@4.23.3': + resolution: {integrity: sha512-P4VAKFHqU0wx9O+q29Q8YVuaowaZ5EM77rxfmGnkHUJggh28useXQdopokgwMeYw2XUht49WX5RcTQ40rZIabw==} - /@algolia/logger-common@4.21.1: - resolution: {integrity: sha512-9AyYpR2OO9vPkkDlpTtW2/6nX+RmMd7LUwzJiAF3uN+BYUiQqgXEp+oGaH8UC0dgetmK7wJO6hw4b39cnTdEpw==} - dev: true + '@algolia/logger-common@4.23.3': + resolution: {integrity: sha512-y9kBtmJwiZ9ZZ+1Ek66P0M68mHQzKRxkW5kAAXYN/rdzgDN0d2COsViEFufxJ0pb45K4FRcfC7+33YB4BLrZ+g==} - /@algolia/logger-console@4.21.1: - resolution: {integrity: sha512-9wizQiQ8kL4DiBmT82i403UwacNuv+0hpfsfaWYZQrGjpzG+yvXETWM4AgwFZLj007esuKQiGfOPUoYFZNkGGA==} - dependencies: - '@algolia/logger-common': 4.21.1 - dev: true + '@algolia/logger-console@4.23.3': + resolution: {integrity: sha512-8xoiseoWDKuCVnWP8jHthgaeobDLolh00KJAdMe9XPrWPuf1by732jSpgy2BlsLTaT9m32pHI8CRfrOqQzHv3A==} - /@algolia/requester-browser-xhr@4.21.1: - resolution: {integrity: sha512-9NudesJLuXtRHV+JD8fTkrsdVj/oAPQbtLnxBbSQeMduzV6+a7W+G9VuWo5fwFymCdXR8/Hb6jy8D1owQIq5Gw==} - dependencies: - '@algolia/requester-common': 4.21.1 - dev: true + '@algolia/recommend@4.23.3': + resolution: {integrity: sha512-9fK4nXZF0bFkdcLBRDexsnGzVmu4TSYZqxdpgBW2tEyfuSSY54D4qSRkLmNkrrz4YFvdh2GM1gA8vSsnZPR73w==} - /@algolia/requester-common@4.21.1: - resolution: {integrity: sha512-KtX2Ep3C43XxoN3xKw755cdf9enE6gPgzh6ufZQRJBl4rYCOoXbiREU6noDYX/Nq+Q+sl03V37WAp0YgtIlh9g==} - dev: true + '@algolia/requester-browser-xhr@4.23.3': + resolution: {integrity: sha512-jDWGIQ96BhXbmONAQsasIpTYWslyjkiGu0Quydjlowe+ciqySpiDUrJHERIRfELE5+wFc7hc1Q5hqjGoV7yghw==} - /@algolia/requester-node-http@4.21.1: - resolution: {integrity: sha512-EcD8cY6Bh2iMySpqXglTKU9+pt+km1ws3xF0V7CGMIUzW1HmN/ZVhi4apCBY4tEMytbyARv0XRTPsolSC4gSSw==} - dependencies: - 
'@algolia/requester-common': 4.21.1 - dev: true + '@algolia/requester-common@4.23.3': + resolution: {integrity: sha512-xloIdr/bedtYEGcXCiF2muajyvRhwop4cMZo+K2qzNht0CMzlRkm8YsDdj5IaBhshqfgmBb3rTg4sL4/PpvLYw==} - /@algolia/transporter@4.21.1: - resolution: {integrity: sha512-KGLFKz8krzOWRwcbR4FT49Grh1dES/mG8dHABEojbvrfUb6kUFxkAee/aezp2GIxuNx+gpQjRn1IzOsqbUZL0A==} - dependencies: - '@algolia/cache-common': 4.21.1 - '@algolia/logger-common': 4.21.1 - '@algolia/requester-common': 4.21.1 - dev: true + '@algolia/requester-node-http@4.23.3': + resolution: {integrity: sha512-zgu++8Uj03IWDEJM3fuNl34s746JnZOWn1Uz5taV1dFyJhVM/kTNw9Ik7YJWiUNHJQXcaD8IXD1eCb0nq/aByA==} + + '@algolia/transporter@4.23.3': + resolution: {integrity: sha512-Wjl5gttqnf/gQKJA+dafnD0Y6Yw97yvfY8R9h0dQltX1GXTgNs1zWgvtWW0tHl1EgMdhAyw189uWiZMnL3QebQ==} + + '@antfu/utils@0.7.10': + resolution: {integrity: sha512-+562v9k4aI80m1+VuMHehNJWLOFjBnXn3tdOitzD0il5b7smkSBal4+a3oKiQTbrwMmN/TBUMDvbdoWDehgOww==} + + '@babel/helper-string-parser@7.24.1': + resolution: {integrity: sha512-2ofRCjnnA9y+wk8b9IAREroeUP02KHp431N2mhKniy2yKIDKpbrHv9eXwm8cBeWQYcJmzv5qKCu65P47eCF7CQ==} + engines: {node: '>=6.9.0'} + + '@babel/helper-string-parser@7.24.8': + resolution: {integrity: sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ==} + engines: {node: '>=6.9.0'} - /@babel/helper-string-parser@7.23.4: - resolution: {integrity: sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==} + '@babel/helper-validator-identifier@7.24.5': + resolution: {integrity: sha512-3q93SSKX2TWCG30M2G2kwaKeTYgEUp5Snjuj8qm729SObL6nbtUldAi37qbxkD5gg3xnBio+f9nqpSepGZMvxA==} engines: {node: '>=6.9.0'} - dev: true - /@babel/helper-validator-identifier@7.22.20: - resolution: {integrity: sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==} + '@babel/helper-validator-identifier@7.24.7': + resolution: {integrity: sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==} engines: {node: '>=6.9.0'} - dev: true - /@babel/parser@7.23.6: - resolution: {integrity: sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ==} + '@babel/parser@7.24.5': + resolution: {integrity: sha512-EOv5IK8arwh3LI47dz1b0tKUb/1uhHAnHJOrjgtQMIpu1uXd9mlFrJg9IUgGUgZ41Ch0K8REPTYpO7B76b4vJg==} engines: {node: '>=6.0.0'} hasBin: true - dependencies: - '@babel/types': 7.23.6 - dev: true - /@babel/types@7.23.6: - resolution: {integrity: sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg==} + '@babel/parser@7.25.6': + resolution: {integrity: sha512-trGdfBdbD0l1ZPmcJ83eNxB9rbEax4ALFTF7fN386TMYbeCQbyme5cOEXQhbGXKebwGaB/J52w1mrklMcbgy6Q==} + engines: {node: '>=6.0.0'} + hasBin: true + + '@babel/types@7.24.5': + resolution: {integrity: sha512-6mQNsaLeXTw0nxYUYu+NSa4Hx4BlF1x1x8/PMFbiR+GBSr+2DkECc69b8hgy2frEodNcvPffeH8YfWd3LI6jhQ==} engines: {node: '>=6.9.0'} - dependencies: - '@babel/helper-string-parser': 7.23.4 - '@babel/helper-validator-identifier': 7.22.20 - to-fast-properties: 2.0.0 - dev: true - /@docsearch/css@3.5.2: - resolution: {integrity: sha512-SPiDHaWKQZpwR2siD0KQUwlStvIAnEyK6tAE2h2Wuoq8ue9skzhlyVQ1ddzOxX6khULnAALDiR/isSF3bnuciA==} - dev: true + '@babel/types@7.25.6': + resolution: {integrity: sha512-/l42B1qxpG6RdfYf343Uw1vmDjeNhneUXtzhojE7pDgfpEypmRhI6j1kr17XCVv4Cgl9HdAiQY2x0GwKm7rWCw==} + engines: {node: '>=6.9.0'} - 
/@docsearch/js@3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0): - resolution: {integrity: sha512-p1YFTCDflk8ieHgFJYfmyHBki1D61+U9idwrLh+GQQMrBSP3DLGKpy0XUJtPjAOPltcVbqsTjiPFfH7JImjUNg==} - dependencies: - '@docsearch/react': 3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0) - preact: 10.19.3 - transitivePeerDependencies: - - '@algolia/client-search' - - '@types/react' - - react - - react-dom - - search-insights - dev: true + '@braintree/sanitize-url@6.0.4': + resolution: {integrity: sha512-s3jaWicZd0pkP0jf5ysyHUI/RE7MHos6qlToFcGWXVp+ykHOy77OUMrfbgJ9it2C5bow7OIQwYYaHjk9XlBQ2A==} + + '@docsearch/css@3.6.1': + resolution: {integrity: sha512-VtVb5DS+0hRIprU2CO6ZQjK2Zg4QU5HrDM1+ix6rT0umsYvFvatMAnf97NHZlVWDaaLlx7GRfR/7FikANiM2Fg==} - /@docsearch/react@3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0): - resolution: {integrity: sha512-9Ahcrs5z2jq/DcAvYtvlqEBHImbm4YJI8M9y0x6Tqg598P40HTEkX7hsMcIuThI+hTFxRGZ9hll0Wygm2yEjng==} + '@docsearch/js@3.6.1': + resolution: {integrity: sha512-erI3RRZurDr1xES5hvYJ3Imp7jtrXj6f1xYIzDzxiS7nNBufYWPbJwrmMqWC5g9y165PmxEmN9pklGCdLi0Iqg==} + + '@docsearch/react@3.6.1': + resolution: {integrity: sha512-qXZkEPvybVhSXj0K7U3bXc233tk5e8PfhoZ6MhPOiik/qUQxYC+Dn9DnoS7CxHQQhHfCvTiN0eY9M12oRghEXw==} peerDependencies: '@types/react': '>= 16.8.0 < 19.0.0' react: '>= 16.8.0 < 19.0.0' @@ -223,1083 +168,2086 @@ packages: optional: true search-insights: optional: true - dependencies: - '@algolia/autocomplete-core': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)(search-insights@2.13.0) - '@algolia/autocomplete-preset-algolia': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1) - '@docsearch/css': 3.5.2 - algoliasearch: 4.21.1 - search-insights: 2.13.0 - transitivePeerDependencies: - - '@algolia/client-search' - dev: true - /@esbuild/android-arm64@0.19.9: - resolution: {integrity: sha512-q4cR+6ZD0938R19MyEW3jEsMzbb/1rulLXiNAJQADD/XYp7pT+rOS5JGxvpRW8dFDEfjW4wLgC/3FXIw4zYglQ==} + '@esbuild/aix-ppc64@0.21.5': + resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [aix] + + '@esbuild/android-arm64@0.21.5': + resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==} engines: {node: '>=12'} cpu: [arm64] os: [android] - requiresBuild: true - dev: true - optional: true - /@esbuild/android-arm@0.19.9: - resolution: {integrity: sha512-jkYjjq7SdsWuNI6b5quymW0oC83NN5FdRPuCbs9HZ02mfVdAP8B8eeqLSYU3gb6OJEaY5CQabtTFbqBf26H3GA==} + '@esbuild/android-arm@0.21.5': + resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==} engines: {node: '>=12'} cpu: [arm] os: [android] - requiresBuild: true - dev: true - optional: true - /@esbuild/android-x64@0.19.9: - resolution: {integrity: sha512-KOqoPntWAH6ZxDwx1D6mRntIgZh9KodzgNOy5Ebt9ghzffOk9X2c1sPwtM9P+0eXbefnDhqYfkh5PLP5ULtWFA==} + '@esbuild/android-x64@0.21.5': + resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==} engines: {node: '>=12'} cpu: [x64] os: [android] - requiresBuild: true - dev: true - optional: true - /@esbuild/darwin-arm64@0.19.9: - resolution: {integrity: sha512-KBJ9S0AFyLVx2E5D8W0vExqRW01WqRtczUZ8NRu+Pi+87opZn5tL4Y0xT0mA4FtHctd0ZgwNoN639fUUGlNIWw==} + '@esbuild/darwin-arm64@0.21.5': + resolution: {integrity: 
sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==} engines: {node: '>=12'} cpu: [arm64] os: [darwin] - requiresBuild: true - dev: true - optional: true - /@esbuild/darwin-x64@0.19.9: - resolution: {integrity: sha512-vE0VotmNTQaTdX0Q9dOHmMTao6ObjyPm58CHZr1UK7qpNleQyxlFlNCaHsHx6Uqv86VgPmR4o2wdNq3dP1qyDQ==} + '@esbuild/darwin-x64@0.21.5': + resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==} engines: {node: '>=12'} cpu: [x64] os: [darwin] - requiresBuild: true - dev: true - optional: true - /@esbuild/freebsd-arm64@0.19.9: - resolution: {integrity: sha512-uFQyd/o1IjiEk3rUHSwUKkqZwqdvuD8GevWF065eqgYfexcVkxh+IJgwTaGZVu59XczZGcN/YMh9uF1fWD8j1g==} + '@esbuild/freebsd-arm64@0.21.5': + resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==} engines: {node: '>=12'} cpu: [arm64] os: [freebsd] - requiresBuild: true - dev: true - optional: true - /@esbuild/freebsd-x64@0.19.9: - resolution: {integrity: sha512-WMLgWAtkdTbTu1AWacY7uoj/YtHthgqrqhf1OaEWnZb7PQgpt8eaA/F3LkV0E6K/Lc0cUr/uaVP/49iE4M4asA==} + '@esbuild/freebsd-x64@0.21.5': + resolution: {integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==} engines: {node: '>=12'} cpu: [x64] os: [freebsd] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-arm64@0.19.9: - resolution: {integrity: sha512-PiPblfe1BjK7WDAKR1Cr9O7VVPqVNpwFcPWgfn4xu0eMemzRp442hXyzF/fSwgrufI66FpHOEJk0yYdPInsmyQ==} + '@esbuild/linux-arm64@0.21.5': + resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==} engines: {node: '>=12'} cpu: [arm64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-arm@0.19.9: - resolution: {integrity: sha512-C/ChPohUYoyUaqn1h17m/6yt6OB14hbXvT8EgM1ZWaiiTYz7nWZR0SYmMnB5BzQA4GXl3BgBO1l8MYqL/He3qw==} + '@esbuild/linux-arm@0.21.5': + resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==} engines: {node: '>=12'} cpu: [arm] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-ia32@0.19.9: - resolution: {integrity: sha512-f37i/0zE0MjDxijkPSQw1CO/7C27Eojqb+r3BbHVxMLkj8GCa78TrBZzvPyA/FNLUMzP3eyHCVkAopkKVja+6Q==} + '@esbuild/linux-ia32@0.21.5': + resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==} engines: {node: '>=12'} cpu: [ia32] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-loong64@0.19.9: - resolution: {integrity: sha512-t6mN147pUIf3t6wUt3FeumoOTPfmv9Cc6DQlsVBpB7eCpLOqQDyWBP1ymXn1lDw4fNUSb/gBcKAmvTP49oIkaA==} + '@esbuild/linux-loong64@0.21.5': + resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==} engines: {node: '>=12'} cpu: [loong64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-mips64el@0.19.9: - resolution: {integrity: sha512-jg9fujJTNTQBuDXdmAg1eeJUL4Jds7BklOTkkH80ZgQIoCTdQrDaHYgbFZyeTq8zbY+axgptncko3v9p5hLZtw==} + '@esbuild/linux-mips64el@0.21.5': + resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==} engines: {node: '>=12'} cpu: [mips64el] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-ppc64@0.19.9: - resolution: {integrity: 
sha512-tkV0xUX0pUUgY4ha7z5BbDS85uI7ABw3V1d0RNTii7E9lbmV8Z37Pup2tsLV46SQWzjOeyDi1Q7Wx2+QM8WaCQ==} + '@esbuild/linux-ppc64@0.21.5': + resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==} engines: {node: '>=12'} cpu: [ppc64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-riscv64@0.19.9: - resolution: {integrity: sha512-DfLp8dj91cufgPZDXr9p3FoR++m3ZJ6uIXsXrIvJdOjXVREtXuQCjfMfvmc3LScAVmLjcfloyVtpn43D56JFHg==} + '@esbuild/linux-riscv64@0.21.5': + resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==} engines: {node: '>=12'} cpu: [riscv64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-s390x@0.19.9: - resolution: {integrity: sha512-zHbglfEdC88KMgCWpOl/zc6dDYJvWGLiUtmPRsr1OgCViu3z5GncvNVdf+6/56O2Ca8jUU+t1BW261V6kp8qdw==} + '@esbuild/linux-s390x@0.21.5': + resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==} engines: {node: '>=12'} cpu: [s390x] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-x64@0.19.9: - resolution: {integrity: sha512-JUjpystGFFmNrEHQnIVG8hKwvA2DN5o7RqiO1CVX8EN/F/gkCjkUMgVn6hzScpwnJtl2mPR6I9XV1oW8k9O+0A==} + '@esbuild/linux-x64@0.21.5': + resolution: {integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==} engines: {node: '>=12'} cpu: [x64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/netbsd-x64@0.19.9: - resolution: {integrity: sha512-GThgZPAwOBOsheA2RUlW5UeroRfESwMq/guy8uEe3wJlAOjpOXuSevLRd70NZ37ZrpO6RHGHgEHvPg1h3S1Jug==} + '@esbuild/netbsd-x64@0.21.5': + resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==} engines: {node: '>=12'} cpu: [x64] os: [netbsd] - requiresBuild: true - dev: true - optional: true - /@esbuild/openbsd-x64@0.19.9: - resolution: {integrity: sha512-Ki6PlzppaFVbLnD8PtlVQfsYw4S9n3eQl87cqgeIw+O3sRr9IghpfSKY62mggdt1yCSZ8QWvTZ9jo9fjDSg9uw==} + '@esbuild/openbsd-x64@0.21.5': + resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==} engines: {node: '>=12'} cpu: [x64] os: [openbsd] - requiresBuild: true - dev: true - optional: true - /@esbuild/sunos-x64@0.19.9: - resolution: {integrity: sha512-MLHj7k9hWh4y1ddkBpvRj2b9NCBhfgBt3VpWbHQnXRedVun/hC7sIyTGDGTfsGuXo4ebik2+3ShjcPbhtFwWDw==} + '@esbuild/sunos-x64@0.21.5': + resolution: {integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==} engines: {node: '>=12'} cpu: [x64] os: [sunos] - requiresBuild: true - dev: true - optional: true - /@esbuild/win32-arm64@0.19.9: - resolution: {integrity: sha512-GQoa6OrQ8G08guMFgeXPH7yE/8Dt0IfOGWJSfSH4uafwdC7rWwrfE6P9N8AtPGIjUzdo2+7bN8Xo3qC578olhg==} + '@esbuild/win32-arm64@0.21.5': + resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==} engines: {node: '>=12'} cpu: [arm64] os: [win32] - requiresBuild: true - dev: true - optional: true - /@esbuild/win32-ia32@0.19.9: - resolution: {integrity: sha512-UOozV7Ntykvr5tSOlGCrqU3NBr3d8JqPes0QWN2WOXfvkWVGRajC+Ym0/Wj88fUgecUCLDdJPDF0Nna2UK3Qtg==} + '@esbuild/win32-ia32@0.21.5': + resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==} engines: {node: '>=12'} 
cpu: [ia32] os: [win32] - requiresBuild: true - dev: true - optional: true - /@esbuild/win32-x64@0.19.9: - resolution: {integrity: sha512-oxoQgglOP7RH6iasDrhY+R/3cHrfwIDvRlT4CGChflq6twk8iENeVvMJjmvBb94Ik1Z+93iGO27err7w6l54GQ==} + '@esbuild/win32-x64@0.21.5': + resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==} engines: {node: '>=12'} cpu: [x64] os: [win32] - requiresBuild: true - dev: true - optional: true - - /@jridgewell/sourcemap-codec@1.4.15: - resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} - dev: true - - /@mdit-vue/plugin-component@1.0.0: - resolution: {integrity: sha512-ZXsJwxkG5yyTHARIYbR74cT4AZ0SfMokFFjiHYCbypHIeYWgJhso4+CZ8+3V9EWFG3EHlGoKNGqKp9chHnqntQ==} - dependencies: - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true - /@mdit-vue/plugin-frontmatter@1.0.0: - resolution: {integrity: sha512-MMA7Ny+YPZA7eDOY1t4E+rKuEWO39mzDdP/M68fKdXJU6VfcGkPr7gnpnJfW2QBJ5qIvMrK/3lDAA2JBy5TfpA==} - dependencies: - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - gray-matter: 4.0.3 - markdown-it: 13.0.2 - dev: true - - /@mdit-vue/plugin-headers@1.0.0: - resolution: {integrity: sha512-0rK/iKy6x13d/Pp5XxdLBshTD0+YjZvtHIaIV+JO+/H2WnOv7oaRgs48G5d44z3XJVUE2u6fNnTlI169fef0/A==} - dependencies: - '@mdit-vue/shared': 1.0.0 - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true - - /@mdit-vue/plugin-sfc@1.0.0: - resolution: {integrity: sha512-agMUe0fY4YHxsZivSvplBwRwrFvsIf/JNUJCAYq1+2Sg9+2hviTBZwjZDxYqHDHOVLtiNr+wuo68tE24mAx3AQ==} - dependencies: - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true - - /@mdit-vue/plugin-title@1.0.0: - resolution: {integrity: sha512-8yC60fCZ95xcJ/cvJH4Lv43Rs4k+33UGyKrRWj5J8TNyMwUyGcwur0XyPM+ffJH4/Bzq4myZLsj/TTFSkXRxvw==} - dependencies: - '@mdit-vue/shared': 1.0.0 - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true + '@isaacs/cliui@8.0.2': + resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} + engines: {node: '>=12'} - /@mdit-vue/plugin-toc@1.0.0: - resolution: {integrity: sha512-WN8blfX0X/5Nolic0ClDWP7eVo9IB+U4g0jbycX3lolIZX5Bai1UpsD3QYZr5VVsPbQJMKMGvTrCEtCNTGvyWQ==} - dependencies: - '@mdit-vue/shared': 1.0.0 - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true + '@jridgewell/sourcemap-codec@1.4.15': + resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} - /@mdit-vue/shared@1.0.0: - resolution: {integrity: sha512-nbYBfmEi+pR2Lm0Z6TMVX2/iBjfr/kGEsHW8CC0rQw+3+sG5dY6VG094HuFAkiAmmvZx9DZZb+7ZMWp9vkwCRw==} - dependencies: - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true + '@jridgewell/sourcemap-codec@1.5.0': + resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==} - /@mdit-vue/types@1.0.0: - resolution: {integrity: sha512-xeF5+sHLzRNF7plbksywKCph4qli20l72of2fMlZQQ7RECvXYrRkE9+bjRFQCyULC7B8ydUYbpbkux5xJlVWyw==} - dev: true + '@mermaid-js/mermaid-mindmap@9.3.0': + resolution: {integrity: sha512-IhtYSVBBRYviH1Ehu8gk69pMDF8DSRqXBRDMWrEfHoaMruHeaP2DXA3PBnuwsMaCdPQhlUUcy/7DBLAEIXvCAw==} - /@nodelib/fs.scandir@2.1.5: + '@nodelib/fs.scandir@2.1.5': resolution: {integrity: 
sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} - dependencies: - '@nodelib/fs.stat': 2.0.5 - run-parallel: 1.2.0 - dev: true - /@nodelib/fs.stat@2.0.5: + '@nodelib/fs.stat@2.0.5': resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} engines: {node: '>= 8'} - dev: true - /@nodelib/fs.walk@1.2.8: + '@nodelib/fs.walk@1.2.8': resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} engines: {node: '>= 8'} - dependencies: - '@nodelib/fs.scandir': 2.1.5 - fastq: 1.15.0 - dev: true - /@rollup/rollup-android-arm-eabi@4.8.0: - resolution: {integrity: sha512-zdTObFRoNENrdPpnTNnhOljYIcOX7aI7+7wyrSpPFFIOf/nRdedE6IYsjaBE7tjukphh1tMTojgJ7p3lKY8x6Q==} + '@pkgjs/parseargs@0.11.0': + resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} + engines: {node: '>=14'} + + '@playwright/test@1.44.0': + resolution: {integrity: sha512-rNX5lbNidamSUorBhB4XZ9SQTjAqfe5M+p37Z8ic0jPFBMo5iCtQz1kRWkEMg+rYOKSlVycpQmpqjSFq7LXOfg==} + engines: {node: '>=16'} + hasBin: true + + '@rollup/rollup-android-arm-eabi@4.22.4': + resolution: {integrity: sha512-Fxamp4aEZnfPOcGA8KSNEohV8hX7zVHOemC8jVBoBUHu5zpJK/Eu3uJwt6BMgy9fkvzxDaurgj96F/NiLukF2w==} cpu: [arm] os: [android] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-android-arm64@4.8.0: - resolution: {integrity: sha512-aiItwP48BiGpMFS9Znjo/xCNQVwTQVcRKkFKsO81m8exrGjHkCBDvm9PHay2kpa8RPnZzzKcD1iQ9KaLY4fPQQ==} + '@rollup/rollup-android-arm64@4.22.4': + resolution: {integrity: sha512-VXoK5UMrgECLYaMuGuVTOx5kcuap1Jm8g/M83RnCHBKOqvPPmROFJGQaZhGccnsFtfXQ3XYa4/jMCJvZnbJBdA==} cpu: [arm64] os: [android] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-darwin-arm64@4.8.0: - resolution: {integrity: sha512-zhNIS+L4ZYkYQUjIQUR6Zl0RXhbbA0huvNIWjmPc2SL0cB1h5Djkcy+RZ3/Bwszfb6vgwUvcVJYD6e6Zkpsi8g==} + '@rollup/rollup-darwin-arm64@4.22.4': + resolution: {integrity: sha512-xMM9ORBqu81jyMKCDP+SZDhnX2QEVQzTcC6G18KlTQEzWK8r/oNZtKuZaCcHhnsa6fEeOBionoyl5JsAbE/36Q==} cpu: [arm64] os: [darwin] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-darwin-x64@4.8.0: - resolution: {integrity: sha512-A/FAHFRNQYrELrb/JHncRWzTTXB2ticiRFztP4ggIUAfa9Up1qfW8aG2w/mN9jNiZ+HB0t0u0jpJgFXG6BfRTA==} + '@rollup/rollup-darwin-x64@4.22.4': + resolution: {integrity: sha512-aJJyYKQwbHuhTUrjWjxEvGnNNBCnmpHDvrb8JFDbeSH3m2XdHcxDd3jthAzvmoI8w/kSjd2y0udT+4okADsZIw==} cpu: [x64] os: [darwin] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-arm-gnueabihf@4.8.0: - resolution: {integrity: sha512-JsidBnh3p2IJJA4/2xOF2puAYqbaczB3elZDT0qHxn362EIoIkq7hrR43Xa8RisgI6/WPfvb2umbGsuvf7E37A==} + '@rollup/rollup-linux-arm-gnueabihf@4.22.4': + resolution: {integrity: sha512-j63YtCIRAzbO+gC2L9dWXRh5BFetsv0j0va0Wi9epXDgU/XUi5dJKo4USTttVyK7fGw2nPWK0PbAvyliz50SCQ==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm-musleabihf@4.22.4': + resolution: {integrity: sha512-dJnWUgwWBX1YBRsuKKMOlXCzh2Wu1mlHzv20TpqEsfdZLb3WoJW2kIEsGwLkroYf24IrPAvOT/ZQ2OYMV6vlrg==} cpu: [arm] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-arm64-gnu@4.8.0: - resolution: {integrity: sha512-hBNCnqw3EVCkaPB0Oqd24bv8SklETptQWcJz06kb9OtiShn9jK1VuTgi7o4zPSt6rNGWQOTDEAccbk0OqJmS+g==} + '@rollup/rollup-linux-arm64-gnu@4.22.4': + resolution: {integrity: 
sha512-AdPRoNi3NKVLolCN/Sp4F4N1d98c4SBnHMKoLuiG6RXgoZ4sllseuGioszumnPGmPM2O7qaAX/IJdeDU8f26Aw==} cpu: [arm64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-arm64-musl@4.8.0: - resolution: {integrity: sha512-Fw9ChYfJPdltvi9ALJ9wzdCdxGw4wtq4t1qY028b2O7GwB5qLNSGtqMsAel1lfWTZvf4b6/+4HKp0GlSYg0ahA==} + '@rollup/rollup-linux-arm64-musl@4.22.4': + resolution: {integrity: sha512-Gl0AxBtDg8uoAn5CCqQDMqAx22Wx22pjDOjBdmG0VIWX3qUBHzYmOKh8KXHL4UpogfJ14G4wk16EQogF+v8hmA==} cpu: [arm64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-riscv64-gnu@4.8.0: - resolution: {integrity: sha512-BH5xIh7tOzS9yBi8dFrCTG8Z6iNIGWGltd3IpTSKp6+pNWWO6qy8eKoRxOtwFbMrid5NZaidLYN6rHh9aB8bEw==} + '@rollup/rollup-linux-powerpc64le-gnu@4.22.4': + resolution: {integrity: sha512-3aVCK9xfWW1oGQpTsYJJPF6bfpWfhbRnhdlyhak2ZiyFLDaayz0EP5j9V1RVLAAxlmWKTDfS9wyRyY3hvhPoOg==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-riscv64-gnu@4.22.4': + resolution: {integrity: sha512-ePYIir6VYnhgv2C5Xe9u+ico4t8sZWXschR6fMgoPUK31yQu7hTEJb7bCqivHECwIClJfKgE7zYsh1qTP3WHUA==} cpu: [riscv64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-x64-gnu@4.8.0: - resolution: {integrity: sha512-PmvAj8k6EuWiyLbkNpd6BLv5XeYFpqWuRvRNRl80xVfpGXK/z6KYXmAgbI4ogz7uFiJxCnYcqyvZVD0dgFog7Q==} + '@rollup/rollup-linux-s390x-gnu@4.22.4': + resolution: {integrity: sha512-GqFJ9wLlbB9daxhVlrTe61vJtEY99/xB3C8e4ULVsVfflcpmR6c8UZXjtkMA6FhNONhj2eA5Tk9uAVw5orEs4Q==} + cpu: [s390x] + os: [linux] + + '@rollup/rollup-linux-x64-gnu@4.22.4': + resolution: {integrity: sha512-87v0ol2sH9GE3cLQLNEy0K/R0pz1nvg76o8M5nhMR0+Q+BBGLnb35P0fVz4CQxHYXaAOhE8HhlkaZfsdUOlHwg==} cpu: [x64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-x64-musl@4.8.0: - resolution: {integrity: sha512-mdxnlW2QUzXwY+95TuxZ+CurrhgrPAMveDWI97EQlA9bfhR8tw3Pt7SUlc/eSlCNxlWktpmT//EAA8UfCHOyXg==} + '@rollup/rollup-linux-x64-musl@4.22.4': + resolution: {integrity: sha512-UV6FZMUgePDZrFjrNGIWzDo/vABebuXBhJEqrHxrGiU6HikPy0Z3LfdtciIttEUQfuDdCn8fqh7wiFJjCNwO+g==} cpu: [x64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-win32-arm64-msvc@4.8.0: - resolution: {integrity: sha512-ge7saUz38aesM4MA7Cad8CHo0Fyd1+qTaqoIo+Jtk+ipBi4ATSrHWov9/S4u5pbEQmLjgUjB7BJt+MiKG2kzmA==} + '@rollup/rollup-win32-arm64-msvc@4.22.4': + resolution: {integrity: sha512-BjI+NVVEGAXjGWYHz/vv0pBqfGoUH0IGZ0cICTn7kB9PyjrATSkX+8WkguNjWoj2qSr1im/+tTGRaY+4/PdcQw==} cpu: [arm64] os: [win32] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-win32-ia32-msvc@4.8.0: - resolution: {integrity: sha512-p9E3PZlzurhlsN5h9g7zIP1DnqKXJe8ZUkFwAazqSvHuWfihlIISPxG9hCHCoA+dOOspL/c7ty1eeEVFTE0UTw==} + '@rollup/rollup-win32-ia32-msvc@4.22.4': + resolution: {integrity: sha512-SiWG/1TuUdPvYmzmYnmd3IEifzR61Tragkbx9D3+R8mzQqDBz8v+BvZNDlkiTtI9T15KYZhP0ehn3Dld4n9J5g==} cpu: [ia32] os: [win32] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-win32-x64-msvc@4.8.0: - resolution: {integrity: sha512-kb4/auKXkYKqlUYTE8s40FcJIj5soOyRLHKd4ugR0dCq0G2EfcF54eYcfQiGkHzjidZ40daB4ulsFdtqNKZtBg==} + '@rollup/rollup-win32-x64-msvc@4.22.4': + resolution: {integrity: sha512-j8pPKp53/lq9lMXN57S8cFz0MynJk8OWNuUnXct/9KCpKU7DgU3bYMJhwWmcqC0UU29p8Lr0/7KEVcaM6bf47Q==} cpu: [x64] os: [win32] - requiresBuild: true - dev: true - optional: true - /@sindresorhus/merge-streams@1.0.0: - resolution: {integrity: 
sha512-rUV5WyJrJLoloD4NDN1V1+LDMDWOa4OTsT4yYJwQNpTU6FWxkxHpL7eu4w+DmiH8x/EAM1otkPE1+LaspIbplw==} - engines: {node: '>=18'} - dev: true + '@shikijs/core@1.18.0': + resolution: {integrity: sha512-VK4BNVCd2leY62Nm2JjyxtRLkyrZT/tv104O81eyaCjHq4Adceq2uJVFJJAIof6lT1mBwZrEo2qT/T+grv3MQQ==} + + '@shikijs/engine-javascript@1.18.0': + resolution: {integrity: sha512-qoP/aO/ATNwYAUw1YMdaip/YVEstMZEgrwhePm83Ll9OeQPuxDZd48szZR8oSQNQBT8m8UlWxZv8EA3lFuyI5A==} + + '@shikijs/engine-oniguruma@1.18.0': + resolution: {integrity: sha512-B9u0ZKI/cud+TcmF8Chyh+R4V5qQVvyDOqXC2l2a4x73PBSBc6sZ0JRAX3eqyJswqir6ktwApUUGBYePdKnMJg==} + + '@shikijs/transformers@1.18.0': + resolution: {integrity: sha512-EdX/UIVaaS8qp9NWRyHIXp2dmuLpdVvx+UVpbIn9eafFlLemAuljPb2+K40ie6jrlg0uUIqkg25CM/8I34yBNw==} + + '@shikijs/types@1.18.0': + resolution: {integrity: sha512-O9N36UEaGGrxv1yUrN2nye7gDLG5Uq0/c1LyfmxsvzNPqlHzWo9DI0A4+fhW2y3bGKuQu/fwS7EPdKJJCowcVA==} - /@types/debug@4.1.12: + '@shikijs/vscode-textmate@9.2.2': + resolution: {integrity: sha512-TMp15K+GGYrWlZM8+Lnj9EaHEFmOen0WJBrfa17hF7taDOYthuPPV0GWzfd/9iMij0akS/8Yw2ikquH7uVi/fg==} + + '@types/d3-scale-chromatic@3.0.3': + resolution: {integrity: sha512-laXM4+1o5ImZv3RpFAsTRn3TEkzqkytiOY0Dz0sq5cnd1dtNlk6sHLon4OvqaiJb28T0S/TdsBI3Sjsy+keJrw==} + + '@types/d3-scale@4.0.8': + resolution: {integrity: sha512-gkK1VVTr5iNiYJ7vWDI+yUFFlszhNMtVeneJ6lUTKPjprsvLLI9/tgEGiXJOnlINJA8FyA88gfnQsHbybVZrYQ==} + + '@types/d3-time@3.0.3': + resolution: {integrity: sha512-2p6olUZ4w3s+07q3Tm2dbiMZy5pCDfYwtLXXHUnVzXgQlZ/OyPtUz6OL382BkOuGlLXqfT+wqv8Fw2v8/0geBw==} + + '@types/debug@4.1.12': resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==} - dependencies: - '@types/ms': 0.7.34 - dev: true - /@types/fs-extra@11.0.4: - resolution: {integrity: sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==} - dependencies: - '@types/jsonfile': 6.1.4 - '@types/node': 20.10.4 - dev: true + '@types/estree@1.0.5': + resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==} - /@types/hash-sum@1.0.2: - resolution: {integrity: sha512-UP28RddqY8xcU0SCEp9YKutQICXpaAq9N8U2klqF5hegGha7KzTOL8EdhIIV3bOSGBzjEpN9bU/d+nNZBdJYVw==} - dev: true + '@types/hast@3.0.4': + resolution: {integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==} - /@types/jsonfile@6.1.4: - resolution: {integrity: sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==} - dependencies: - '@types/node': 20.10.4 - dev: true + '@types/linkify-it@5.0.0': + resolution: {integrity: sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==} - /@types/linkify-it@3.0.5: - resolution: {integrity: sha512-yg6E+u0/+Zjva+buc3EIb+29XEg4wltq7cSmd4Uc2EE/1nUVmxyzpX6gUXD0V8jIrG0r7YeOGVIbYRkxeooCtw==} - dev: true + '@types/markdown-it@14.1.2': + resolution: {integrity: sha512-promo4eFwuiW+TfGxhi+0x3czqTYJkG8qB17ZUJiVF10Xm7NLVRSLUsfRTU/6h1e24VvRnXCx+hG7li58lkzog==} - /@types/markdown-it-emoji@2.0.4: - resolution: {integrity: sha512-H6ulk/ZmbDxOayPwI/leJzrmoW1YKX1Z+MVSCHXuYhvqckV4I/c+hPTf6UiqJyn2avWugfj30XroheEb6/Ekqg==} - dependencies: - '@types/markdown-it': 13.0.7 - dev: true + '@types/mdast@3.0.15': + resolution: {integrity: sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==} - /@types/markdown-it@13.0.7: - resolution: {integrity: 
sha512-U/CBi2YUUcTHBt5tjO2r5QV/x0Po6nsYwQU4Y04fBS6vfoImaiZ6f8bi3CjTCxBPQSO1LMyUqkByzi8AidyxfA==} - dependencies: - '@types/linkify-it': 3.0.5 - '@types/mdurl': 1.0.5 - dev: true + '@types/mdast@4.0.4': + resolution: {integrity: sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==} - /@types/mdurl@1.0.5: - resolution: {integrity: sha512-6L6VymKTzYSrEf4Nev4Xa1LCHKrlTlYCBMTlQKFuddo1CvQcE52I0mwfOJayueUC7MJuXOeHTcIU683lzd0cUA==} - dev: true + '@types/mdurl@2.0.0': + resolution: {integrity: sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==} - /@types/ms@0.7.34: + '@types/ms@0.7.34': resolution: {integrity: sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==} - dev: true - /@types/node@17.0.45: + '@types/node@17.0.45': resolution: {integrity: sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==} - dev: true - /@types/node@20.10.4: - resolution: {integrity: sha512-D08YG6rr8X90YB56tSIuBaddy/UXAA9RKJoFvrsnogAum/0pmjkgi4+2nx96A330FmioegBWmEYQ+syqCFaveg==} - dependencies: - undici-types: 5.26.5 - dev: true + '@types/node@20.12.12': + resolution: {integrity: sha512-eWLDGF/FOSPtAvEqeRAQ4C8LSA7M1I7i0ky1I8U7kD1J5ITyW3AsRhQrKVoWf5pFKZ2kILsEGJhsI9r93PYnOw==} - /@types/sax@1.2.7: + '@types/sax@1.2.7': resolution: {integrity: sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A==} - dependencies: - '@types/node': 20.10.4 - dev: true - /@types/web-bluetooth@0.0.20: + '@types/unist@2.0.11': + resolution: {integrity: sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==} + + '@types/unist@3.0.3': + resolution: {integrity: sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==} + + '@types/web-bluetooth@0.0.20': resolution: {integrity: sha512-g9gZnnXVq7gM7v3tJCWV/qw7w+KeOlSHAhgF9RytFyifW6AF61hdT2ucrYhPq9hLs5JIryeupHV3qGk95dH9ow==} - dev: true - /@vitejs/plugin-vue@4.5.2(vite@5.0.12)(vue@3.3.11): - resolution: {integrity: sha512-UGR3DlzLi/SaVBPX0cnSyE37vqxU3O6chn8l0HJNzQzDia6/Au2A4xKv+iIJW8w2daf80G7TYHhi1pAUjdZ0bQ==} - engines: {node: ^14.18.0 || >=16.0.0} + '@ungap/structured-clone@1.2.0': + resolution: {integrity: sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==} + + '@vitejs/plugin-vue@5.1.4': + resolution: {integrity: sha512-N2XSI2n3sQqp5w7Y/AN/L2XDjBIRGqXko+eDp42sydYSBeJuSm5a1sLf8zakmo8u7tA8NmBgoDLA1HeOESjp9A==} + engines: {node: ^18.0.0 || >=20.0.0} peerDependencies: - vite: ^4.0.0 || ^5.0.0 + vite: ^5.0.0 vue: ^3.2.25 - dependencies: - vite: 5.0.12 - vue: 3.3.11 - dev: true - - /@vue/compiler-core@3.3.11: - resolution: {integrity: sha512-h97/TGWBilnLuRaj58sxNrsUU66fwdRKLOLQ9N/5iNDfp+DZhYH9Obhe0bXxhedl8fjAgpRANpiZfbgWyruQ0w==} - dependencies: - '@babel/parser': 7.23.6 - '@vue/shared': 3.3.11 - estree-walker: 2.0.2 - source-map-js: 1.0.2 - dev: true - /@vue/compiler-dom@3.3.11: - resolution: {integrity: sha512-zoAiUIqSKqAJ81WhfPXYmFGwDRuO+loqLxvXmfUdR5fOitPoUiIeFI9cTTyv9MU5O1+ZZglJVTusWzy+wfk5hw==} - dependencies: - '@vue/compiler-core': 3.3.11 - '@vue/shared': 3.3.11 - dev: true + '@vue/compiler-core@3.4.27': + resolution: {integrity: sha512-E+RyqY24KnyDXsCuQrI+mlcdW3ALND6U7Gqa/+bVwbcpcR3BRRIckFoz7Qyd4TTlnugtwuI7YgjbvsLmxb+yvg==} - /@vue/compiler-sfc@3.3.11: - resolution: {integrity: 
sha512-U4iqPlHO0KQeK1mrsxCN0vZzw43/lL8POxgpzcJweopmqtoYy9nljJzWDIQS3EfjiYhfdtdk9Gtgz7MRXnz3GA==} - dependencies: - '@babel/parser': 7.23.6 - '@vue/compiler-core': 3.3.11 - '@vue/compiler-dom': 3.3.11 - '@vue/compiler-ssr': 3.3.11 - '@vue/reactivity-transform': 3.3.11 - '@vue/shared': 3.3.11 - estree-walker: 2.0.2 - magic-string: 0.30.5 - postcss: 8.4.32 - source-map-js: 1.0.2 - dev: true + '@vue/compiler-core@3.5.8': + resolution: {integrity: sha512-Uzlxp91EPjfbpeO5KtC0KnXPkuTfGsNDeaKQJxQN718uz+RqDYarEf7UhQJGK+ZYloD2taUbHTI2J4WrUaZQNA==} - /@vue/compiler-ssr@3.3.11: - resolution: {integrity: sha512-Zd66ZwMvndxRTgVPdo+muV4Rv9n9DwQ4SSgWWKWkPFebHQfVYRrVjeygmmDmPewsHyznCNvJ2P2d6iOOhdv8Qg==} - dependencies: - '@vue/compiler-dom': 3.3.11 - '@vue/shared': 3.3.11 - dev: true + '@vue/compiler-dom@3.4.27': + resolution: {integrity: sha512-kUTvochG/oVgE1w5ViSr3KUBh9X7CWirebA3bezTbB5ZKBQZwR2Mwj9uoSKRMFcz4gSMzzLXBPD6KpCLb9nvWw==} - /@vue/devtools-api@6.5.1: - resolution: {integrity: sha512-+KpckaAQyfbvshdDW5xQylLni1asvNSGme1JFs8I1+/H5pHEhqUKMEQD/qn3Nx5+/nycBq11qAEi8lk+LXI2dA==} - dev: true + '@vue/compiler-dom@3.5.8': + resolution: {integrity: sha512-GUNHWvoDSbSa5ZSHT9SnV5WkStWfzJwwTd6NMGzilOE/HM5j+9EB9zGXdtu/fCNEmctBqMs6C9SvVPpVPuk1Eg==} - /@vue/reactivity-transform@3.3.11: - resolution: {integrity: sha512-fPGjH0wqJo68A0wQ1k158utDq/cRyZNlFoxGwNScE28aUFOKFEnCBsvyD8jHn+0kd0UKVpuGuaZEQ6r9FJRqCg==} - dependencies: - '@babel/parser': 7.23.6 - '@vue/compiler-core': 3.3.11 - '@vue/shared': 3.3.11 - estree-walker: 2.0.2 - magic-string: 0.30.5 - dev: true + '@vue/compiler-sfc@3.4.27': + resolution: {integrity: sha512-nDwntUEADssW8e0rrmE0+OrONwmRlegDA1pD6QhVeXxjIytV03yDqTey9SBDiALsvAd5U4ZrEKbMyVXhX6mCGA==} - /@vue/reactivity@3.3.11: - resolution: {integrity: sha512-D5tcw091f0nuu+hXq5XANofD0OXnBmaRqMYl5B3fCR+mX+cXJIGNw/VNawBqkjLNWETrFW0i+xH9NvDbTPVh7g==} - dependencies: - '@vue/shared': 3.3.11 - dev: true + '@vue/compiler-sfc@3.5.8': + resolution: {integrity: sha512-taYpngQtSysrvO9GULaOSwcG5q821zCoIQBtQQSx7Uf7DxpR6CIHR90toPr9QfDD2mqHQPCSgoWBvJu0yV9zjg==} - /@vue/runtime-core@3.3.11: - resolution: {integrity: sha512-g9ztHGwEbS5RyWaOpXuyIVFTschclnwhqEbdy5AwGhYOgc7m/q3NFwr50MirZwTTzX55JY8pSkeib9BX04NIpw==} - dependencies: - '@vue/reactivity': 3.3.11 - '@vue/shared': 3.3.11 - dev: true + '@vue/compiler-ssr@3.4.27': + resolution: {integrity: sha512-CVRzSJIltzMG5FcidsW0jKNQnNRYC8bT21VegyMMtHmhW3UOI7knmUehzswXLrExDLE6lQCZdrhD4ogI7c+vuw==} - /@vue/runtime-dom@3.3.11: - resolution: {integrity: sha512-OlhtV1PVpbgk+I2zl+Y5rQtDNcCDs12rsRg71XwaA2/Rbllw6mBLMi57VOn8G0AjOJ4Mdb4k56V37+g8ukShpQ==} - dependencies: - '@vue/runtime-core': 3.3.11 - '@vue/shared': 3.3.11 - csstype: 3.1.3 - dev: true + '@vue/compiler-ssr@3.5.8': + resolution: {integrity: sha512-W96PtryNsNG9u0ZnN5Q5j27Z/feGrFV6zy9q5tzJVyJaLiwYxvC0ek4IXClZygyhjm+XKM7WD9pdKi/wIRVC/Q==} - /@vue/server-renderer@3.3.11(vue@3.3.11): - resolution: {integrity: sha512-AIWk0VwwxCAm4wqtJyxBylRTXSy1wCLOKbWxHaHiu14wjsNYtiRCSgVuqEPVuDpErOlRdNnuRgipQfXRLjLN5A==} - peerDependencies: - vue: 3.3.11 - dependencies: - '@vue/compiler-ssr': 3.3.11 - '@vue/shared': 3.3.11 - vue: 3.3.11 - dev: true - - /@vue/shared@3.3.11: - resolution: {integrity: sha512-u2G8ZQ9IhMWTMXaWqZycnK4UthG1fA238CD+DP4Dm4WJi5hdUKKLg0RMRaRpDPNMdkTwIDkp7WtD0Rd9BH9fLw==} - dev: true - - /@vuepress/bundler-vite@2.0.0-rc.0: - resolution: {integrity: sha512-rX8S8IYpqqlJfNPstS/joorpxXx/4WuE7+gDM31i2HUrxOKGZVzq8ZsRRRU2UdoTwHZSd3LpUS4sMtxE5xLK1A==} - dependencies: - '@vitejs/plugin-vue': 4.5.2(vite@5.0.12)(vue@3.3.11) - 
'@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - autoprefixer: 10.4.16(postcss@8.4.32) - connect-history-api-fallback: 2.0.0 - postcss: 8.4.32 - postcss-load-config: 4.0.2(postcss@8.4.32) - rollup: 4.8.0 - vite: 5.0.12 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@types/node' - - '@vue/composition-api' - - less - - lightningcss - - sass - - stylus - - sugarss - - supports-color - - terser - - ts-node - - typescript - dev: true + '@vue/devtools-api@7.4.5': + resolution: {integrity: sha512-PX9uXirHOY2P99kb1cP3DxWZojFW3acNMqd+l4i5nKcqY59trXTOfwDZXt2Qifu0OU1izAQb76Ur6NPVldF2KQ==} - /@vuepress/cli@2.0.0-rc.0: - resolution: {integrity: sha512-XWSIFO9iOR7N4O2lXIwS5vZuLjU9WU/aGAtmhMWEMxrdMx7TQaJbgrfpTUEbHMf+cPI1DXBbUbtmkqIvtfOV0w==} - hasBin: true - dependencies: - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - cac: 6.7.14 - chokidar: 3.5.3 - envinfo: 7.11.0 - esbuild: 0.19.9 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@vue/devtools-kit@7.4.5': + resolution: {integrity: sha512-Uuki4Z6Bc/ExvtlPkeDNGSAe4580R+HPcVABfTE9TF7BTz3Nntk7vxIRUyWblZkUEcB/x+wn2uofyt5i2LaUew==} - /@vuepress/client@2.0.0-rc.0: - resolution: {integrity: sha512-TwQx8hJgYONYxX+QltZ2aw9O5Ym6SKelfiUduuIRb555B1gece/jSVap3H/ZwyBhpgJMtG4+/Mrmf8nlDSHjvw==} - dependencies: - '@vue/devtools-api': 6.5.1 - '@vuepress/shared': 2.0.0-rc.0 - '@vueuse/core': 10.7.0(vue@3.3.11) - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@vue/composition-api' - - typescript - dev: true + '@vue/devtools-shared@7.4.5': + resolution: {integrity: sha512-2XgUOkL/7QDmyYI9J7cm+rz/qBhcGv+W5+i1fhwdQ0HQ1RowhdK66F0QBuJSz/5k12opJY8eN6m03/XZMs7imQ==} - /@vuepress/core@2.0.0-rc.0: - resolution: {integrity: sha512-uoOaZP1MdxZYJIAJcRcmYKKeCIVnxZeOuLMOOB9CPuAKSalT1RvJ1lztw6RX3q9SPnlqtSZPQXDncPAZivw4pA==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/markdown': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - vue: 3.3.11 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true - - /@vuepress/markdown@2.0.0-rc.0: - resolution: {integrity: sha512-USmqdKKMT6ZFHYRztTjKUlO8qgGfnEygMAAq4AzC/uYXiEfrbMBLAWJhteyGS56P3rGLj0OPAhksE681bX/wOg==} - dependencies: - '@mdit-vue/plugin-component': 1.0.0 - '@mdit-vue/plugin-frontmatter': 1.0.0 - '@mdit-vue/plugin-headers': 1.0.0 - '@mdit-vue/plugin-sfc': 1.0.0 - '@mdit-vue/plugin-title': 1.0.0 - '@mdit-vue/plugin-toc': 1.0.0 - '@mdit-vue/shared': 1.0.0 - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - '@types/markdown-it-emoji': 2.0.4 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - markdown-it: 13.0.2 - markdown-it-anchor: 8.6.7(@types/markdown-it@13.0.7)(markdown-it@13.0.2) - markdown-it-emoji: 2.0.2 - mdurl: 1.0.1 - transitivePeerDependencies: - - supports-color - dev: true + '@vue/reactivity@3.4.27': + resolution: {integrity: sha512-kK0g4NknW6JX2yySLpsm2jlunZJl2/RJGZ0H9ddHdfBVHcNzxmQ0sS0b09ipmBoQpY8JM2KmUw+a6sO8Zo+zIA==} - /@vuepress/plugin-active-header-links@2.0.0-rc.0: - resolution: {integrity: sha512-UJdXLYNGL5Wjy5YGY8M2QgqT75bZ95EHebbqGi8twBdIJE9O+bM+dPJyYtAk2PIVqFORiw3Hj+PchsNSxdn9+g==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - ts-debounce: 4.0.0 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) 
- transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@vue/reactivity@3.5.8': + resolution: {integrity: sha512-mlgUyFHLCUZcAYkqvzYnlBRCh0t5ZQfLYit7nukn1GR96gc48Bp4B7OIcSfVSvlG1k3BPfD+p22gi1t2n9tsXg==} - /@vuepress/plugin-back-to-top@2.0.0-rc.0: - resolution: {integrity: sha512-6GPfuzV5lkAnR00BxRUhqMXwMWt741alkq2R6bln4N8BneSOwEpX/7vi19MGf232aKdS/Va4pF5p0/nJ8Sed/g==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - ts-debounce: 4.0.0 - vue: 3.3.11 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true - - /@vuepress/plugin-container@2.0.0-rc.0: - resolution: {integrity: sha512-b7vrLN11YE7qiUDPfA3N9P7Z8fupe9Wbcr9KAE/bmfZ9VT4d6kzpVyoU7XHi99XngitsmnkaXP4aBvBF1c2AnA==} - dependencies: - '@types/markdown-it': 13.0.7 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/markdown': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - markdown-it: 13.0.2 - markdown-it-container: 3.0.0 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true - - /@vuepress/plugin-docsearch@2.0.0-rc.0(@algolia/client-search@4.21.1)(search-insights@2.13.0): - resolution: {integrity: sha512-bFbb+RxNyoLVbojv3Fh3UNfMmx9tszdae5ni9nG2xa05giCRwGKT0wFG3Q6n0a9kIQ6V7z3PjCj9x1k4SALPEA==} - dependencies: - '@docsearch/css': 3.5.2 - '@docsearch/js': 3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0) - '@docsearch/react': 3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0) - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - '@vueuse/core': 10.7.0(vue@3.3.11) - ts-debounce: 4.0.0 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@algolia/client-search' - - '@types/react' - - '@vue/composition-api' - - react - - react-dom - - search-insights - - supports-color - - typescript - dev: true + '@vue/runtime-core@3.4.27': + resolution: {integrity: sha512-7aYA9GEbOOdviqVvcuweTLe5Za4qBZkUY7SvET6vE8kyypxVgaT1ixHLg4urtOlrApdgcdgHoTZCUuTGap/5WA==} - /@vuepress/plugin-external-link-icon@2.0.0-rc.0: - resolution: {integrity: sha512-o8bk0oIlj/BkKc02mq91XLDloq1VOz/8iNcRwKAeqBE6svXzdYiyoTGet0J/4iPuAetsCn75S57W6RioDJHMnQ==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/markdown': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - vue: 3.3.11 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@vue/runtime-core@3.5.8': + resolution: {integrity: sha512-fJuPelh64agZ8vKkZgp5iCkPaEqFJsYzxLk9vSC0X3G8ppknclNDr61gDc45yBGTaN5Xqc1qZWU3/NoaBMHcjQ==} - /@vuepress/plugin-git@2.0.0-rc.0: - resolution: {integrity: sha512-r7UF77vZxaYeJQLygzodKv+15z3/dTLuGp4VcYO21W6BlJZvd4u9zqgiV7A//bZQvK4+3Hprylr0G3KgXqMewA==} - dependencies: - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - execa: 8.0.1 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@vue/runtime-dom@3.4.27': + resolution: {integrity: sha512-ScOmP70/3NPM+TW9hvVAz6VWWtZJqkbdf7w6ySsws+EsqtHvkhxaWLecrTorFxsawelM5Ys9FnDEMt6BPBDS0Q==} - /@vuepress/plugin-google-analytics@2.0.0-rc.0: - resolution: {integrity: sha512-rkYW2LGkLAfRFtaFWVPr1V2mS6hwgYhn2hLeJAF5xHlC3PcjCiSV0cqH7ooeCo+FBJUlCtMQ9N8iSNl63vd7VQ==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 
2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@vue/runtime-dom@3.5.8': + resolution: {integrity: sha512-DpAUz+PKjTZPUOB6zJgkxVI3GuYc2iWZiNeeHQUw53kdrparSTG6HeXUrYDjaam8dVsCdvQxDz6ZWxnyjccUjQ==} - /@vuepress/plugin-medium-zoom@2.0.0-rc.0: - resolution: {integrity: sha512-peU1lYKsmKikIe/0pkJuHzD/k6xW2TuqdvKVhV4I//aOE1WxsREKJ4ACcldmoIsnysoDydAUqKT6xDPGyDsH2g==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - medium-zoom: 1.1.0 - vue: 3.3.11 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@vue/server-renderer@3.4.27': + resolution: {integrity: sha512-dlAMEuvmeA3rJsOMJ2J1kXU7o7pOxgsNHVr9K8hB3ImIkSuBrIdy0vF66h8gf8Tuinf1TK3mPAz2+2sqyf3KzA==} + peerDependencies: + vue: 3.4.27 - /@vuepress/plugin-nprogress@2.0.0-rc.0: - resolution: {integrity: sha512-rI+eK0Pg1KiZE+7hGmDUeSbgdWCid8Vnw0hFKNmjinDzGVmx4m03M6qfvclsI0SryH+lR7itZGLaR4gbTlrz/w==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@vue/server-renderer@3.5.8': + resolution: {integrity: sha512-7AmC9/mEeV9mmXNVyUIm1a1AjUhyeeGNbkLh39J00E7iPeGks8OGRB5blJiMmvqSh8SkaS7jkLWSpXtxUCeagA==} + peerDependencies: + vue: 3.5.8 - /@vuepress/plugin-palette@2.0.0-rc.0: - resolution: {integrity: sha512-wW70SCp3/K7s1lln5YQsBGTog2WXaQv5piva5zhXcQ47YGf4aAJpThDa5C/ot4HhkPOKn8Iz5s0ckxXZzW8DIg==} - dependencies: - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - chokidar: 3.5.3 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@vue/shared@3.4.27': + resolution: {integrity: sha512-DL3NmY2OFlqmYYrzp39yi3LDkKxa5vZVwxWdQ3rG0ekuWscHraeIbnI8t+aZK7qhYqEqWKTUdijadunb9pnrgA==} - /@vuepress/plugin-prismjs@2.0.0-rc.0: - resolution: {integrity: sha512-c5WRI7+FhVjdbymOKQ8F2KY/Bnv7aQtWScVk8vCMUimNi7v7Wff/A/i3KSFNz/tge3LxiAeH/Dc2WS/OnQXwCg==} - dependencies: - '@vuepress/core': 2.0.0-rc.0 - prismjs: 1.29.0 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@vue/shared@3.5.8': + resolution: {integrity: sha512-mJleSWbAGySd2RJdX1RBtcrUBX6snyOc0qHpgk3lGi4l9/P/3ny3ELqFWqYdkXIwwNN/kdm8nD9ky8o6l/Lx2A==} - /@vuepress/plugin-theme-data@2.0.0-rc.0: - resolution: {integrity: sha512-FXY3/Ml+rM6gNKvwdBF6vKAcwnSvtXCzKgQwJAw3ppQTKUkLcbOxqM+h4d8bzHWAAvdnEvQFug5uEZgWllBQbA==} - dependencies: - '@vue/devtools-api': 6.5.1 - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - vue: 3.3.11 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@vueuse/core@10.9.0': + resolution: {integrity: sha512-/1vjTol8SXnx6xewDEKfS0Ra//ncg4Hb0DaZiwKf7drgfMsKFExQ+FnnENcN6efPen+1kIzhLQoGSy0eDUVOMg==} - /@vuepress/shared@2.0.0-rc.0: - resolution: {integrity: sha512-ikdSfjRv5LGM1iv4HHwF9P6gqTjaFCXKPK+hzlkHFHNZO1GLqk7/BPc4F51tAG1s8TcLhUZc+54LrfgS7PkXXA==} - dependencies: - '@mdit-vue/types': 1.0.0 - '@vue/shared': 3.3.11 - dev: true + '@vueuse/core@11.1.0': + resolution: {integrity: sha512-P6dk79QYA6sKQnghrUz/1tHi0n9mrb/iO1WTMk/ElLmTyNqgDeSZ3wcDf6fRBGzRJbeG1dxzEOvLENMjr+E3fg==} - /@vuepress/theme-default@2.0.0-rc.0: - resolution: 
{integrity: sha512-I8Y08evDmMuD1jh3NftPpFFSlCWOizQDJLjN7EQwcg7jiAP4A7c2REo6nBN2EmP24Mi7UrRM+RnytHR5V+pElA==} + '@vueuse/integrations@11.1.0': + resolution: {integrity: sha512-O2ZgrAGPy0qAjpoI2YR3egNgyEqwG85fxfwmA9BshRIGjV4G6yu6CfOPpMHAOoCD+UfsIl7Vb1bXJ6ifrHYDDA==} peerDependencies: - sass-loader: ^13.3.2 + async-validator: ^4 + axios: ^1 + change-case: ^5 + drauu: ^0.4 + focus-trap: ^7 + fuse.js: ^7 + idb-keyval: ^6 + jwt-decode: ^4 + nprogress: ^0.2 + qrcode: ^1.5 + sortablejs: ^1 + universal-cookie: ^7 peerDependenciesMeta: - sass-loader: + async-validator: optional: true - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/plugin-active-header-links': 2.0.0-rc.0 - '@vuepress/plugin-back-to-top': 2.0.0-rc.0 - '@vuepress/plugin-container': 2.0.0-rc.0 - '@vuepress/plugin-external-link-icon': 2.0.0-rc.0 - '@vuepress/plugin-git': 2.0.0-rc.0 - '@vuepress/plugin-medium-zoom': 2.0.0-rc.0 - '@vuepress/plugin-nprogress': 2.0.0-rc.0 - '@vuepress/plugin-palette': 2.0.0-rc.0 - '@vuepress/plugin-prismjs': 2.0.0-rc.0 - '@vuepress/plugin-theme-data': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - '@vueuse/core': 10.7.0(vue@3.3.11) - sass: 1.69.5 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + axios: + optional: true + change-case: + optional: true + drauu: + optional: true + focus-trap: + optional: true + fuse.js: + optional: true + idb-keyval: + optional: true + jwt-decode: + optional: true + nprogress: + optional: true + qrcode: + optional: true + sortablejs: + optional: true + universal-cookie: + optional: true + + '@vueuse/metadata@10.9.0': + resolution: {integrity: sha512-iddNbg3yZM0X7qFY2sAotomgdHK7YJ6sKUvQqbvwnf7TmaVPxS4EJydcNsVejNdS8iWCtDk+fYXr7E32nyTnGA==} + + '@vueuse/metadata@11.1.0': + resolution: {integrity: sha512-l9Q502TBTaPYGanl1G+hPgd3QX5s4CGnpXriVBR5fEZ/goI6fvDaVmIl3Td8oKFurOxTmbXvBPSsgrd6eu6HYg==} + + '@vueuse/shared@10.9.0': + resolution: {integrity: sha512-Uud2IWncmAfJvRaFYzv5OHDli+FbOzxiVEQdLCKQKLyhz94PIyFC3CHcH7EDMwIn8NPtD06+PNbC/PiO0LGLtw==} + + '@vueuse/shared@11.1.0': + resolution: {integrity: sha512-YUtIpY122q7osj+zsNMFAfMTubGz0sn5QzE5gPzAIiCmtt2ha3uQUY1+JPyL4gRCTsLPX82Y9brNbo/aqlA91w==} + + algoliasearch@4.23.3: + resolution: {integrity: sha512-Le/3YgNvjW9zxIQMRhUHuhiUjAlKY/zsdZpfq4dlLqg6mEm0nL6yk+7f2hDOtLpxsgE4jSzDmvHL7nXdBp5feg==} + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-regex@6.0.1: + resolution: {integrity: sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==} + engines: {node: '>=12'} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + ansi-styles@6.2.1: + resolution: {integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==} + engines: {node: '>=12'} + + anymatch@3.1.3: + resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} + engines: {node: '>= 8'} + + arg@5.0.2: + resolution: {integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==} + + argparse@2.0.1: + resolution: {integrity: 
sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + binary-extensions@2.3.0: + resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} + engines: {node: '>=8'} + + birpc@0.2.17: + resolution: {integrity: sha512-+hkTxhot+dWsLpp3gia5AkVHIsKlZybNT5gIYiDlNzJrmYPcTM9k5/w2uaj3IPpd7LlEYpmCj4Jj1nC41VhDFg==} + + brace-expansion@2.0.1: + resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} + + braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + + ccount@2.0.1: + resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==} + + character-entities-html4@2.1.0: + resolution: {integrity: sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==} + + character-entities-legacy@3.0.0: + resolution: {integrity: sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==} + + character-entities@2.0.2: + resolution: {integrity: sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==} + + chokidar@3.6.0: + resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} + engines: {node: '>= 8.10.0'} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + comma-separated-tokens@2.0.3: + resolution: {integrity: sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==} + + commander@12.0.0: + resolution: {integrity: sha512-MwVNWlYjDTtOjX5PiD7o5pK0UrFU/OYgcJfjjK4RaHZETNtjJqrZa9Y9ds88+A+f+d5lv+561eZ+yCKoS3gbAA==} + engines: {node: '>=18'} + + commander@7.2.0: + resolution: {integrity: sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==} + engines: {node: '>= 10'} + + commander@8.3.0: + resolution: {integrity: sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==} + engines: {node: '>= 12'} + + copy-anything@3.0.5: + resolution: {integrity: sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==} + engines: {node: '>=12.13'} + + cose-base@1.0.3: + resolution: {integrity: sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==} + + cose-base@2.2.0: + resolution: {integrity: sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==} + + cross-spawn@7.0.3: + resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} + engines: {node: '>= 8'} + + csstype@3.1.3: + resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==} + + cytoscape-cose-bilkent@4.1.0: + resolution: {integrity: 
sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==} + peerDependencies: + cytoscape: ^3.2.0 + + cytoscape-fcose@2.2.0: + resolution: {integrity: sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==} + peerDependencies: + cytoscape: ^3.2.0 + + cytoscape@3.30.3: + resolution: {integrity: sha512-HncJ9gGJbVtw7YXtIs3+6YAFSSiKsom0amWc33Z7QbylbY2JGMrA0yz4EwrdTScZxnwclXeEZHzO5pxoy0ZE4g==} + engines: {node: '>=0.10'} + + d3-array@2.12.1: + resolution: {integrity: sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==} + + d3-array@3.2.4: + resolution: {integrity: sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==} + engines: {node: '>=12'} + + d3-axis@3.0.0: + resolution: {integrity: sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==} + engines: {node: '>=12'} + + d3-brush@3.0.0: + resolution: {integrity: sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==} + engines: {node: '>=12'} + + d3-chord@3.0.1: + resolution: {integrity: sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==} + engines: {node: '>=12'} + + d3-color@3.1.0: + resolution: {integrity: sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==} + engines: {node: '>=12'} + + d3-contour@4.0.2: + resolution: {integrity: sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==} + engines: {node: '>=12'} + + d3-delaunay@6.0.4: + resolution: {integrity: sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==} + engines: {node: '>=12'} + + d3-dispatch@3.0.1: + resolution: {integrity: sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==} + engines: {node: '>=12'} + + d3-drag@3.0.0: + resolution: {integrity: sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==} + engines: {node: '>=12'} + + d3-dsv@3.0.1: + resolution: {integrity: sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==} + engines: {node: '>=12'} + hasBin: true + + d3-ease@3.0.1: + resolution: {integrity: sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==} + engines: {node: '>=12'} + + d3-fetch@3.0.1: + resolution: {integrity: sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==} + engines: {node: '>=12'} + + d3-force@3.0.0: + resolution: {integrity: sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==} + engines: {node: '>=12'} + + d3-format@3.1.0: + resolution: {integrity: sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==} + engines: {node: '>=12'} + + d3-geo@3.1.1: + resolution: {integrity: sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==} + engines: {node: '>=12'} + + d3-hierarchy@3.1.2: + resolution: {integrity: sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==} + engines: {node: '>=12'} + + d3-interpolate@3.0.1: + resolution: {integrity: sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==} + engines: {node: '>=12'} + + 
d3-path@1.0.9: + resolution: {integrity: sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==} + + d3-path@3.1.0: + resolution: {integrity: sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==} + engines: {node: '>=12'} + + d3-polygon@3.0.1: + resolution: {integrity: sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==} + engines: {node: '>=12'} + + d3-quadtree@3.0.1: + resolution: {integrity: sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==} + engines: {node: '>=12'} + + d3-random@3.0.1: + resolution: {integrity: sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==} + engines: {node: '>=12'} + + d3-sankey@0.12.3: + resolution: {integrity: sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==} + + d3-scale-chromatic@3.1.0: + resolution: {integrity: sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==} + engines: {node: '>=12'} + + d3-scale@4.0.2: + resolution: {integrity: sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==} + engines: {node: '>=12'} + + d3-selection@3.0.0: + resolution: {integrity: sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==} + engines: {node: '>=12'} + + d3-shape@1.3.7: + resolution: {integrity: sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==} + + d3-shape@3.2.0: + resolution: {integrity: sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==} + engines: {node: '>=12'} + + d3-time-format@4.1.0: + resolution: {integrity: sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==} + engines: {node: '>=12'} + + d3-time@3.1.0: + resolution: {integrity: sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==} + engines: {node: '>=12'} + + d3-timer@3.0.1: + resolution: {integrity: sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==} + engines: {node: '>=12'} + + d3-transition@3.0.1: + resolution: {integrity: sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==} + engines: {node: '>=12'} + peerDependencies: + d3-selection: 2 - 3 + + d3-zoom@3.0.0: + resolution: {integrity: sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==} + engines: {node: '>=12'} + + d3@7.9.0: + resolution: {integrity: sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==} + engines: {node: '>=12'} + + dagre-d3-es@7.0.10: + resolution: {integrity: sha512-qTCQmEhcynucuaZgY5/+ti3X/rnszKZhEQH/ZdWdtP1tA/y3VoHJzcVrO9pjjJCNpigfscAtoUB5ONcd2wNn0A==} + + dayjs@1.11.13: + resolution: {integrity: sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg==} + + debug@4.3.7: + resolution: {integrity: sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + decode-named-character-reference@1.0.2: + resolution: {integrity: 
sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==} + + deep-extend@0.6.0: + resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==} + engines: {node: '>=4.0.0'} + + delaunator@5.0.1: + resolution: {integrity: sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==} + + dequal@2.0.3: + resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} + engines: {node: '>=6'} + + devlop@1.1.0: + resolution: {integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==} + + diff@5.2.0: + resolution: {integrity: sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==} + engines: {node: '>=0.3.1'} + + dompurify@3.1.6: + resolution: {integrity: sha512-cTOAhc36AalkjtBpfG6O8JimdTMWNXjiePT2xQH/ppBGi/4uIpmj8eKyIkMJErXWARyINV/sB38yf8JCLF5pbQ==} + + eastasianwidth@0.2.0: + resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + + elkjs@0.9.3: + resolution: {integrity: sha512-f/ZeWvW/BCXbhGEf1Ujp29EASo/lk1FDnETgNKwJrsVvGZhUWCZyg3xLJjAsxfOmt8KjswHmI5EwCQcPMpOYhQ==} + + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + + entities@4.5.0: + resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==} + engines: {node: '>=0.12'} + + esbuild@0.21.5: + resolution: {integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==} + engines: {node: '>=12'} + hasBin: true + + estree-walker@2.0.2: + resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} + + fast-glob@3.3.2: + resolution: {integrity: sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==} + engines: {node: '>=8.6.0'} + + fastq@1.17.1: + resolution: {integrity: sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==} + + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + + focus-trap@7.5.4: + resolution: {integrity: sha512-N7kHdlgsO/v+iD/dMoJKtsSqs5Dz/dXZVebRgJw23LDk+jMi/974zyiOYDziY2JPp8xivq9BmUGwIJMiuSBi7w==} + + foreground-child@3.1.1: + resolution: {integrity: sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==} + engines: {node: '>=14'} + + fsevents@2.3.2: + resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + get-stdin@9.0.0: + resolution: {integrity: sha512-dVKBjfWisLAicarI2Sf+JuBE/DghV4UzNAVe9yhEJuzeREd3JhOTE9cUaJTeSa77fsbQUK3pcOpJfM59+VKZaA==} + engines: {node: '>=12'} + + glob-parent@5.1.2: + resolution: 
{integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + + glob@10.3.16: + resolution: {integrity: sha512-JDKXl1DiuuHJ6fVS2FXjownaavciiHNUU4mOvV/B793RLh05vZL1rcPnCSaOgv1hDT6RDlY7AB7ZUvFYAtPgAw==} + engines: {node: '>=16 || 14 >=14.18'} + hasBin: true + + hast-util-to-html@9.0.3: + resolution: {integrity: sha512-M17uBDzMJ9RPCqLMO92gNNUDuBSq10a25SDBI08iCCxmorf4Yy6sYHK57n9WAbRAAaU+DuR4W6GN9K4DFZesYg==} + + hast-util-whitespace@3.0.0: + resolution: {integrity: sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==} + + hookable@5.5.3: + resolution: {integrity: sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==} + + html-void-elements@3.0.0: + resolution: {integrity: sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==} + + iconv-lite@0.6.3: + resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} + engines: {node: '>=0.10.0'} + + ignore@5.3.1: + resolution: {integrity: sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==} + engines: {node: '>= 4'} + + immutable@4.3.7: + resolution: {integrity: sha512-1hqclzwYwjRDFLjcFxOM5AYkkG0rpFPpr1RLPMEuGczoS7YA8gLhy8SWXYRAA/XwfEHpfo3cw5JGioS32fnMRw==} + + ini@4.1.3: + resolution: {integrity: sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + internmap@1.0.1: + resolution: {integrity: sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==} + + internmap@2.0.3: + resolution: {integrity: sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==} + engines: {node: '>=12'} + + is-binary-path@2.1.0: + resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} + engines: {node: '>=8'} + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + + is-what@4.1.16: + resolution: {integrity: sha512-ZhMwEosbFJkA0YhFnNDgTM4ZxDRsS6HqTo7qsZM08fehyRYIYa0yHu5R6mgo1n/8MgaPBXiPimPD77baVFYg+A==} + engines: {node: '>=12.13'} + + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + + jackspeak@3.1.2: + resolution: {integrity: sha512-kWmLKn2tRtfYMF/BakihVVRzBKOxz4gJMiL2Rj91WnAB5TPZumSH99R/Yf1qE1u4uRimvCSJfm6hnxohXeEXjQ==} + engines: {node: '>=14'} + + js-yaml@4.1.0: + resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} + hasBin: true + + jsonc-parser@3.2.1: + resolution: {integrity: 
sha512-AilxAyFOAcK5wA1+LeaySVBrHsGQvUFCDWXKpZjzaL0PqW+xfBOttn8GNtWKFWqneyMZj41MWF9Kl6iPWLwgOA==} + + jsonpointer@5.0.1: + resolution: {integrity: sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==} + engines: {node: '>=0.10.0'} + + katex@0.16.11: + resolution: {integrity: sha512-RQrI8rlHY92OLf3rho/Ts8i/XvjgguEjOkO1BEXcU3N8BqPpSzBNwV/G0Ukr+P/l3ivvJUE/Fa/CwbS6HesGNQ==} + hasBin: true + + khroma@2.1.0: + resolution: {integrity: sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==} + + kleur@4.1.5: + resolution: {integrity: sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==} + engines: {node: '>=6'} + + layout-base@1.0.2: + resolution: {integrity: sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==} + + layout-base@2.0.1: + resolution: {integrity: sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==} + + linkify-it@5.0.0: + resolution: {integrity: sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==} + + lodash-es@4.17.21: + resolution: {integrity: sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==} + + lru-cache@10.2.2: + resolution: {integrity: sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==} + engines: {node: 14 || >=16.14} + + magic-string@0.30.10: + resolution: {integrity: sha512-iIRwTIf0QKV3UAnYK4PU8uiEc4SRh5jX0mwpIwETPpHdhVM4f53RSwS/vXvN1JhGX+Cs7B8qIq3d6AH49O5fAQ==} + + magic-string@0.30.11: + resolution: {integrity: sha512-+Wri9p0QHMy+545hKww7YAu5NyzF8iomPL/RQazugQ9+Ez4Ic3mERMd8ZTX5rfK944j+560ZJi8iAwgak1Ac7A==} + + mark.js@8.11.1: + resolution: {integrity: sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ==} + + markdown-it-footnote@4.0.0: + resolution: {integrity: sha512-WYJ7urf+khJYl3DqofQpYfEYkZKbmXmwxQV8c8mO/hGIhgZ1wOe7R4HLFNwqx7TjILbnC98fuyeSsin19JdFcQ==} + + markdown-it@14.1.0: + resolution: {integrity: sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==} + hasBin: true + + markdownlint-cli@0.40.0: + resolution: {integrity: sha512-JXhI3dRQcaqwiFYpPz6VJ7aKYheD53GmTz9y4D/d0F1MbZDGOp9pqKlbOfUX/pHP/iAoeiE4wYRmk8/kjLakxA==} + engines: {node: '>=18'} + hasBin: true + + markdownlint-micromark@0.1.9: + resolution: {integrity: sha512-5hVs/DzAFa8XqYosbEAEg6ok6MF2smDj89ztn9pKkCtdKHVdPQuGMH7frFfYL9mLkvfFe4pTyAMffLbjf3/EyA==} + engines: {node: '>=18'} + + markdownlint@0.34.0: + resolution: {integrity: sha512-qwGyuyKwjkEMOJ10XN6OTKNOVYvOIi35RNvDLNxTof5s8UmyGHlCdpngRHoRGNvQVGuxO3BJ7uNSgdeX166WXw==} + engines: {node: '>=18'} + + mdast-util-from-markdown@1.3.1: + resolution: {integrity: sha512-4xTO/M8c82qBcnQc1tgpNtubGUW/Y1tBQ1B0i5CtSoelOLKFYlElIr3bvgREYYO5iRqbMY1YuqZng0GVOI8Qww==} + + mdast-util-to-hast@13.2.0: + resolution: {integrity: sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==} + + mdast-util-to-string@3.2.0: + resolution: {integrity: sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==} + + mdurl@2.0.0: + resolution: {integrity: sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==} + + merge2@1.4.1: + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} 
+ engines: {node: '>= 8'} + + mermaid@10.9.3: + resolution: {integrity: sha512-V80X1isSEvAewIL3xhmz/rVmc27CVljcsbWxkxlWJWY/1kQa4XOABqpDl2qQLGKzpKm6WbTfUEKImBlUfFYArw==} + + micromark-core-commonmark@1.1.0: + resolution: {integrity: sha512-BgHO1aRbolh2hcrzL2d1La37V0Aoz73ymF8rAcKnohLy93titmv62E0gP8Hrx9PKcKrqCZ1BbLGbP3bEhoXYlw==} + + micromark-factory-destination@1.1.0: + resolution: {integrity: sha512-XaNDROBgx9SgSChd69pjiGKbV+nfHGDPVYFs5dOoDd7ZnMAE+Cuu91BCpsY8RT2NP9vo/B8pds2VQNCLiu0zhg==} + + micromark-factory-label@1.1.0: + resolution: {integrity: sha512-OLtyez4vZo/1NjxGhcpDSbHQ+m0IIGnT8BoPamh+7jVlzLJBH98zzuCoUeMxvM6WsNeh8wx8cKvqLiPHEACn0w==} + + micromark-factory-space@1.1.0: + resolution: {integrity: sha512-cRzEj7c0OL4Mw2v6nwzttyOZe8XY/Z8G0rzmWQZTBi/jjwyw/U4uqKtUORXQrR5bAZZnbTI/feRV/R7hc4jQYQ==} + + micromark-factory-title@1.1.0: + resolution: {integrity: sha512-J7n9R3vMmgjDOCY8NPw55jiyaQnH5kBdV2/UXCtZIpnHH3P6nHUKaH7XXEYuWwx/xUJcawa8plLBEjMPU24HzQ==} + + micromark-factory-whitespace@1.1.0: + resolution: {integrity: sha512-v2WlmiymVSp5oMg+1Q0N1Lxmt6pMhIHD457whWM7/GUlEks1hI9xj5w3zbc4uuMKXGisksZk8DzP2UyGbGqNsQ==} + + micromark-util-character@1.2.0: + resolution: {integrity: sha512-lXraTwcX3yH/vMDaFWCQJP1uIszLVebzUa3ZHdrgxr7KEU/9mL4mVgCpGbyhvNLNlauROiNUq7WN5u7ndbY6xg==} + + micromark-util-character@2.1.0: + resolution: {integrity: sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==} + + micromark-util-chunked@1.1.0: + resolution: {integrity: sha512-Ye01HXpkZPNcV6FiyoW2fGZDUw4Yc7vT0E9Sad83+bEDiCJ1uXu0S3mr8WLpsz3HaG3x2q0HM6CTuPdcZcluFQ==} + + micromark-util-classify-character@1.1.0: + resolution: {integrity: sha512-SL0wLxtKSnklKSUplok1WQFoGhUdWYKggKUiqhX+Swala+BtptGCu5iPRc+xvzJ4PXE/hwM3FNXsfEVgoZsWbw==} + + micromark-util-combine-extensions@1.1.0: + resolution: {integrity: sha512-Q20sp4mfNf9yEqDL50WwuWZHUrCO4fEyeDCnMGmG5Pr0Cz15Uo7KBs6jq+dq0EgX4DPwwrh9m0X+zPV1ypFvUA==} + + micromark-util-decode-numeric-character-reference@1.1.0: + resolution: {integrity: sha512-m9V0ExGv0jB1OT21mrWcuf4QhP46pH1KkfWy9ZEezqHKAxkj4mPCy3nIH1rkbdMlChLHX531eOrymlwyZIf2iw==} + + micromark-util-decode-string@1.1.0: + resolution: {integrity: sha512-YphLGCK8gM1tG1bd54azwyrQRjCFcmgj2S2GoJDNnh4vYtnL38JS8M4gpxzOPNyHdNEpheyWXCTnnTDY3N+NVQ==} + + micromark-util-encode@1.1.0: + resolution: {integrity: sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw==} + + micromark-util-encode@2.0.0: + resolution: {integrity: sha512-pS+ROfCXAGLWCOc8egcBvT0kf27GoWMqtdarNfDcjb6YLuV5cM3ioG45Ys2qOVqeqSbjaKg72vU+Wby3eddPsA==} + + micromark-util-html-tag-name@1.2.0: + resolution: {integrity: sha512-VTQzcuQgFUD7yYztuQFKXT49KghjtETQ+Wv/zUjGSGBioZnkA4P1XXZPT1FHeJA6RwRXSF47yvJ1tsJdoxwO+Q==} + + micromark-util-normalize-identifier@1.1.0: + resolution: {integrity: sha512-N+w5vhqrBihhjdpM8+5Xsxy71QWqGn7HYNUvch71iV2PM7+E3uWGox1Qp90loa1ephtCxG2ftRV/Conitc6P2Q==} + + micromark-util-resolve-all@1.1.0: + resolution: {integrity: sha512-b/G6BTMSg+bX+xVCshPTPyAu2tmA0E4X98NSR7eIbeC6ycCqCeE7wjfDIgzEbkzdEVJXRtOG4FbEm/uGbCRouA==} + + micromark-util-sanitize-uri@1.2.0: + resolution: {integrity: sha512-QO4GXv0XZfWey4pYFndLUKEAktKkG5kZTdUNaTAkzbuJxn2tNBOr+QtxR2XpWaMhbImT2dPzyLrPXLlPhph34A==} + + micromark-util-sanitize-uri@2.0.0: + resolution: {integrity: sha512-WhYv5UEcZrbAtlsnPuChHUAsu/iBPOVaEVsntLBIdpibO0ddy8OzavZz3iL2xVvBZOpolujSliP65Kq0/7KIYw==} + + micromark-util-subtokenize@1.1.0: + resolution: {integrity: 
sha512-kUQHyzRoxvZO2PuLzMt2P/dwVsTiivCK8icYTeR+3WgbuPqfHgPPy7nFKbeqRivBvn/3N3GBiNC+JRTMSxEC7A==} + + micromark-util-symbol@1.1.0: + resolution: {integrity: sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag==} + + micromark-util-symbol@2.0.0: + resolution: {integrity: sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw==} + + micromark-util-types@1.1.0: + resolution: {integrity: sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==} + + micromark-util-types@2.0.0: + resolution: {integrity: sha512-oNh6S2WMHWRZrmutsRmDDfkzKtxF+bc2VxLC9dvtrDIRFln627VsFP6fLMgTryGDljgLPjkrzQSDcPrjPyDJ5w==} + + micromark@3.2.0: + resolution: {integrity: sha512-uD66tJj54JLYq0De10AhWycZWGQNUvDI55xPgk2sQM5kn1JYlhbCMTtEeT27+vAhW2FBQxLlOmS3pmA7/2z4aA==} + + micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} + engines: {node: '>=8.6'} + + minimatch@9.0.4: + resolution: {integrity: sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==} + engines: {node: '>=16 || 14 >=14.17'} + + minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + + minipass@7.1.1: + resolution: {integrity: sha512-UZ7eQ+h8ywIRAW1hIEl2AqdwzJucU/Kp59+8kkZeSvafXhZjul247BvIJjEVFVeON6d7lM46XX1HXCduKAS8VA==} + engines: {node: '>=16 || 14 >=14.17'} + + minisearch@7.1.0: + resolution: {integrity: sha512-tv7c/uefWdEhcu6hvrfTihflgeEi2tN6VV7HJnCjK6VxM75QQJh4t9FwJCsA2EsRS8LCnu3W87CuGPWMocOLCA==} + + mitt@3.0.1: + resolution: {integrity: sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==} + + mri@1.2.0: + resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==} + engines: {node: '>=4'} + + ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + + nanoid@3.3.7: + resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + non-layered-tidy-tree-layout@2.0.2: + resolution: {integrity: sha512-gkXMxRzUH+PB0ax9dUN0yYF0S25BqeAYqhgMaLUFmpXLEk7Fcu8f4emJuOAY0V8kjDICxROIKsTAKsV/v355xw==} + + normalize-path@3.0.0: + resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} + engines: {node: '>=0.10.0'} + + oniguruma-to-js@0.4.3: + resolution: {integrity: sha512-X0jWUcAlxORhOqqBREgPMgnshB7ZGYszBNspP+tS9hPD3l13CdaXcHbgImoHUHlrvGx/7AvFEkTRhAGYh+jzjQ==} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + path-scurry@1.11.1: + resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} + engines: {node: '>=16 || 14 >=14.18'} + + perfect-debounce@1.0.0: + resolution: {integrity: sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==} + + picocolors@1.1.0: + resolution: {integrity: sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==} + + picomatch@2.3.1: + resolution: {integrity: 
sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + + playwright-core@1.44.0: + resolution: {integrity: sha512-ZTbkNpFfYcGWohvTTl+xewITm7EOuqIqex0c7dNZ+aXsbrLj0qI8XlGKfPpipjm0Wny/4Lt4CJsWJk1stVS5qQ==} + engines: {node: '>=16'} + hasBin: true + + playwright@1.44.0: + resolution: {integrity: sha512-F9b3GUCLQ3Nffrfb6dunPOkE5Mh68tR7zN32L4jCk4FjQamgesGay7/dAAe1WaMEGV04DkdJfcJzjoCKygUaRQ==} + engines: {node: '>=16'} + hasBin: true + + postcss@8.4.47: + resolution: {integrity: sha512-56rxCq7G/XfB4EkXq9Egn5GCqugWvDFjafDOThIdMBsI15iqPqR5r15TfSr1YPYeEI19YeaXMCbY6u88Y76GLQ==} + engines: {node: ^10 || ^12 || >=14} + + preact@10.22.0: + resolution: {integrity: sha512-RRurnSjJPj4rp5K6XoP45Ui33ncb7e4H7WiOHVpjbkvqvA3U+N8Z6Qbo0AE6leGYBV66n8EhEaFixvIu3SkxFw==} + + property-information@6.5.0: + resolution: {integrity: sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==} + + punycode.js@2.3.1: + resolution: {integrity: sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==} + engines: {node: '>=6'} + + queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + + readdirp@3.6.0: + resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} + engines: {node: '>=8.10.0'} + + regex@4.3.2: + resolution: {integrity: sha512-kK/AA3A9K6q2js89+VMymcboLOlF5lZRCYJv3gzszXFHBr6kO6qLGzbm+UIugBEV8SMMKCTR59txoY6ctRHYVw==} + + reusify@1.0.4: + resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + + rfdc@1.4.1: + resolution: {integrity: sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==} + + robust-predicates@3.0.2: + resolution: {integrity: sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==} + + rollup@4.22.4: + resolution: {integrity: sha512-vD8HJ5raRcWOyymsR6Z3o6+RzfEPCnVLMFJ6vRslO1jt4LO6dUo5Qnpg7y4RkZFM2DMe3WUirkI5c16onjrc6A==} + engines: {node: '>=18.0.0', npm: '>=8.0.0'} + hasBin: true + + run-con@1.3.2: + resolution: {integrity: sha512-CcfE+mYiTcKEzg0IqS08+efdnH0oJ3zV0wSUFBNrMHMuxCtXvBCLzCJHatwuXDcu/RlhjTziTo/a1ruQik6/Yg==} + hasBin: true + + run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + + rw@1.3.3: + resolution: {integrity: sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==} + + sade@1.8.1: + resolution: {integrity: sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==} + engines: {node: '>=6'} + + safer-buffer@2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + + sass@1.77.2: + resolution: {integrity: sha512-eb4GZt1C3avsX3heBNlrc7I09nyT00IUuo4eFhAbeXWU2fvA7oXI53SxODVAA+zgZCk9aunAZgO+losjR3fAwA==} + engines: {node: '>=14.0.0'} + hasBin: true + + sax@1.3.0: + resolution: {integrity: sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==} + + search-insights@2.13.0: + resolution: {integrity: sha512-Orrsjf9trHHxFRuo9/rzm0KIWmgzE8RMlZMzuhZOJ01Rnz3D0YBAe+V6473t6/H6c7irs6Lt48brULAiRWb3Vw==} + + shebang-command@2.0.0: + 
resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + + shiki@1.18.0: + resolution: {integrity: sha512-8jo7tOXr96h9PBQmOHVrltnETn1honZZY76YA79MHheGQg55jBvbm9dtU+MI5pjC5NJCFuA6rvVTLVeSW5cE4A==} + + signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: '>=14'} + + sitemap-ts@1.8.0: + resolution: {integrity: sha512-QjsUjYRAyxYaVZuJ0E3u3sTaUzkmXf+QterxUVRas/7dfl+oLIOug2haQR6yLGc/+G2oQEHOj1CaT1b97Xon2Q==} + + sitemap@8.0.0: + resolution: {integrity: sha512-+AbdxhM9kJsHtruUF39bwS/B0Fytw6Fr1o4ZAIAEqA6cke2xcoO2GleBw9Zw7nRzILVEgz7zBM5GiTJjie1G9A==} + engines: {node: '>=14.0.0', npm: '>=6.0.0'} + hasBin: true + + source-map-js@1.2.0: + resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==} + engines: {node: '>=0.10.0'} + + source-map-js@1.2.1: + resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} + engines: {node: '>=0.10.0'} + + space-separated-tokens@2.0.2: + resolution: {integrity: sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==} + + speakingurl@14.0.1: + resolution: {integrity: sha512-1POYv7uv2gXoyGFpBCmpDVSNV74IfsWlDW216UPjbWufNf+bSU6GdbDsxdcxtfwb4xlI3yxzOTKClUosxARYrQ==} + engines: {node: '>=0.10.0'} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + string-width@5.1.2: + resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} + engines: {node: '>=12'} + + stringify-entities@4.0.4: + resolution: {integrity: sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + strip-ansi@7.1.0: + resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} + engines: {node: '>=12'} + + strip-json-comments@3.1.1: + resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} + + stylis@4.3.4: + resolution: {integrity: sha512-osIBl6BGUmSfDkyH2mB7EFvCJntXDrLhKjHTRj/rK6xLH0yuPrHULDRQzKokSOD4VoorhtKpfcfW1GAntu8now==} + + superjson@2.2.1: + resolution: {integrity: sha512-8iGv75BYOa0xRJHK5vRLEjE2H/i4lulTjzpUXic3Eg8akftYjkmQDa8JARQ42rlczXyFR3IeRoeFCc7RxHsYZA==} + engines: {node: '>=16'} + + tabbable@6.2.0: + resolution: {integrity: sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew==} + + to-fast-properties@2.0.0: + resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} + engines: {node: '>=4'} + + to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + + toml@3.0.0: + resolution: {integrity: 
sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==} + + trim-lines@3.0.1: + resolution: {integrity: sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==} + + ts-dedent@2.2.0: + resolution: {integrity: sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==} + engines: {node: '>=6.10'} + + uc.micro@2.1.0: + resolution: {integrity: sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==} + + undici-types@5.26.5: + resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + + unist-util-is@6.0.0: + resolution: {integrity: sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==} + + unist-util-position@5.0.0: + resolution: {integrity: sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==} + + unist-util-stringify-position@3.0.3: + resolution: {integrity: sha512-k5GzIBZ/QatR8N5X2y+drfpWG8IDBzdnVj6OInRNWm1oXrzydiaAT2OQiA8DPRRZyAKb9b6I2a6PxYklZD0gKg==} + + unist-util-stringify-position@4.0.0: + resolution: {integrity: sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==} + + unist-util-visit-parents@6.0.1: + resolution: {integrity: sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==} + + unist-util-visit@5.0.0: + resolution: {integrity: sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==} + + uuid@9.0.1: + resolution: {integrity: sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==} + hasBin: true + + uvu@0.5.6: + resolution: {integrity: sha512-+g8ENReyr8YsOc6fv/NVJs2vFdHBnBNdfE49rshrTzDWOlUx4Gq7KOS2GD8eqhy2j+Ejq29+SbKH8yjkAqXqoA==} + engines: {node: '>=8'} + hasBin: true + + vfile-message@4.0.2: + resolution: {integrity: sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==} + + vfile@6.0.3: + resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==} + + vite@5.4.8: + resolution: {integrity: sha512-FqrItQ4DT1NC4zCUqMB4c4AZORMKIa0m8/URVCZ77OZ/QSNeJ54bU1vrFADbDsuwfIPcgknRkmqakQcgnL4GiQ==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + peerDependencies: + '@types/node': ^18.0.0 || >=20.0.0 + less: '*' + lightningcss: ^1.21.0 + sass: '*' + sass-embedded: '*' + stylus: '*' + sugarss: '*' + terser: ^5.4.0 + peerDependenciesMeta: + '@types/node': + optional: true + less: + optional: true + lightningcss: + optional: true + sass: + optional: true + sass-embedded: + optional: true + stylus: + optional: true + sugarss: + optional: true + terser: + optional: true + + vitepress-plugin-mermaid@2.0.16: + resolution: {integrity: sha512-sW0Eu4+1EzRdwZBMGjzwKDsbQiuJIxCy8BlMw7Ur88p9fXalrFYKqZ3wYWLxsFTBipeooFIeanef/xw1P+v7vQ==} + peerDependencies: + mermaid: '10' + vitepress: ^1.0.0 || ^1.0.0-alpha + + vitepress@1.3.4: + resolution: {integrity: sha512-I1/F6OW1xl3kW4PaIMC6snxjWgf3qfziq2aqsDoFc/Gt41WbcRv++z8zjw8qGRIJ+I4bUW7ZcKFDHHN/jkH9DQ==} + hasBin: true + peerDependencies: + markdown-it-mathjax3: ^4 + postcss: ^8 + peerDependenciesMeta: + markdown-it-mathjax3: + optional: true + postcss: + optional: true + + vue-demi@0.14.10: + resolution: {integrity: 
sha512-nMZBOwuzabUO0nLgIcc6rycZEebF6eeUfaiQx9+WSk8e29IbLvPU9feI6tqW4kTo3hvoYAJkMh8n8D0fuISphg==} + engines: {node: '>=12'} + hasBin: true + peerDependencies: + '@vue/composition-api': ^1.0.0-rc.1 + vue: ^3.0.0-0 || ^2.6.0 + peerDependenciesMeta: + '@vue/composition-api': + optional: true + + vue-demi@0.14.7: + resolution: {integrity: sha512-EOG8KXDQNwkJILkx/gPcoL/7vH+hORoBaKgGe+6W7VFMvCYJfmF2dGbvgDroVnI8LU7/kTu8mbjRZGBU1z9NTA==} + engines: {node: '>=12'} + hasBin: true + peerDependencies: + '@vue/composition-api': ^1.0.0-rc.1 + vue: ^3.0.0-0 || ^2.6.0 + peerDependenciesMeta: + '@vue/composition-api': + optional: true + + vue@3.4.27: + resolution: {integrity: sha512-8s/56uK6r01r1icG/aEOHqyMVxd1bkYcSe9j8HcKtr/xTOFWvnzIVTehNW+5Yt89f+DLBe4A569pnZLS5HzAMA==} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + vue@3.5.8: + resolution: {integrity: sha512-hvuvuCy51nP/1fSRvrrIqTLSvrSyz2Pq+KQ8S8SXCxTWVE0nMaOnSDnSOxV1eYmGfvK7mqiwvd1C59CEEz7dAQ==} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + web-worker@1.3.0: + resolution: {integrity: sha512-BSR9wyRsy/KOValMgd5kMyr3JzpdeoR9KVId8u5GVlTTAtNChlsE4yTxeY7zMdNSyOmoKBv8NH2qeRY9Tg+IaA==} + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + wrap-ansi@8.1.0: + resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} + engines: {node: '>=12'} + + xml-formatter@3.6.3: + resolution: {integrity: sha512-++x1TlRO1FRlQ82AZ4WnoCSufaI/PT/sycn4K8nRl4gnrNC1uYY2VV/67aALZ2m0Q4Q/BLj/L69K360Itw9NNg==} + engines: {node: '>= 16'} + + xml-parser-xo@4.1.2: + resolution: {integrity: sha512-Z/DRB0ZAKj5vAQg++XsfQQKfT73Vfj5n5lKIVXobBDQEva6NHWUTxOA6OohJmEcpoy8AEqBmSGkXXAnFwt5qAA==} + engines: {node: '>= 16'} + + zwitch@2.0.4: + resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} + +snapshots: + + '@algolia/autocomplete-core@1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)(search-insights@2.13.0)': + dependencies: + '@algolia/autocomplete-plugin-algolia-insights': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)(search-insights@2.13.0) + '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3) + transitivePeerDependencies: + - '@algolia/client-search' + - algoliasearch + - search-insights + + '@algolia/autocomplete-plugin-algolia-insights@1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)(search-insights@2.13.0)': + dependencies: + '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3) + search-insights: 2.13.0 + transitivePeerDependencies: + - '@algolia/client-search' + - algoliasearch + + '@algolia/autocomplete-preset-algolia@1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)': + dependencies: + '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3) + '@algolia/client-search': 4.23.3 + algoliasearch: 4.23.3 + + '@algolia/autocomplete-shared@1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)': + dependencies: + '@algolia/client-search': 4.23.3 + algoliasearch: 4.23.3 + + 
'@algolia/cache-browser-local-storage@4.23.3': + dependencies: + '@algolia/cache-common': 4.23.3 + + '@algolia/cache-common@4.23.3': {} + + '@algolia/cache-in-memory@4.23.3': + dependencies: + '@algolia/cache-common': 4.23.3 + + '@algolia/client-account@4.23.3': + dependencies: + '@algolia/client-common': 4.23.3 + '@algolia/client-search': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/client-analytics@4.23.3': + dependencies: + '@algolia/client-common': 4.23.3 + '@algolia/client-search': 4.23.3 + '@algolia/requester-common': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/client-common@4.23.3': + dependencies: + '@algolia/requester-common': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/client-personalization@4.23.3': + dependencies: + '@algolia/client-common': 4.23.3 + '@algolia/requester-common': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/client-search@4.23.3': + dependencies: + '@algolia/client-common': 4.23.3 + '@algolia/requester-common': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/logger-common@4.23.3': {} + + '@algolia/logger-console@4.23.3': + dependencies: + '@algolia/logger-common': 4.23.3 + + '@algolia/recommend@4.23.3': + dependencies: + '@algolia/cache-browser-local-storage': 4.23.3 + '@algolia/cache-common': 4.23.3 + '@algolia/cache-in-memory': 4.23.3 + '@algolia/client-common': 4.23.3 + '@algolia/client-search': 4.23.3 + '@algolia/logger-common': 4.23.3 + '@algolia/logger-console': 4.23.3 + '@algolia/requester-browser-xhr': 4.23.3 + '@algolia/requester-common': 4.23.3 + '@algolia/requester-node-http': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/requester-browser-xhr@4.23.3': + dependencies: + '@algolia/requester-common': 4.23.3 + + '@algolia/requester-common@4.23.3': {} + + '@algolia/requester-node-http@4.23.3': + dependencies: + '@algolia/requester-common': 4.23.3 + + '@algolia/transporter@4.23.3': + dependencies: + '@algolia/cache-common': 4.23.3 + '@algolia/logger-common': 4.23.3 + '@algolia/requester-common': 4.23.3 + + '@antfu/utils@0.7.10': {} + + '@babel/helper-string-parser@7.24.1': {} + + '@babel/helper-string-parser@7.24.8': {} + + '@babel/helper-validator-identifier@7.24.5': {} + + '@babel/helper-validator-identifier@7.24.7': {} + + '@babel/parser@7.24.5': + dependencies: + '@babel/types': 7.24.5 + + '@babel/parser@7.25.6': + dependencies: + '@babel/types': 7.25.6 + + '@babel/types@7.24.5': + dependencies: + '@babel/helper-string-parser': 7.24.1 + '@babel/helper-validator-identifier': 7.24.5 + to-fast-properties: 2.0.0 + + '@babel/types@7.25.6': + dependencies: + '@babel/helper-string-parser': 7.24.8 + '@babel/helper-validator-identifier': 7.24.7 + to-fast-properties: 2.0.0 + + '@braintree/sanitize-url@6.0.4': {} + + '@docsearch/css@3.6.1': {} + + '@docsearch/js@3.6.1(@algolia/client-search@4.23.3)(search-insights@2.13.0)': + dependencies: + '@docsearch/react': 3.6.1(@algolia/client-search@4.23.3)(search-insights@2.13.0) + preact: 10.22.0 + transitivePeerDependencies: + - '@algolia/client-search' + - '@types/react' + - react + - react-dom + - search-insights + + '@docsearch/react@3.6.1(@algolia/client-search@4.23.3)(search-insights@2.13.0)': + dependencies: + '@algolia/autocomplete-core': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)(search-insights@2.13.0) + '@algolia/autocomplete-preset-algolia': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3) + '@docsearch/css': 3.6.1 + algoliasearch: 4.23.3 + optionalDependencies: + search-insights: 2.13.0 + transitivePeerDependencies: + - 
'@algolia/client-search' + + '@esbuild/aix-ppc64@0.21.5': + optional: true + + '@esbuild/android-arm64@0.21.5': + optional: true + + '@esbuild/android-arm@0.21.5': + optional: true + + '@esbuild/android-x64@0.21.5': + optional: true + + '@esbuild/darwin-arm64@0.21.5': + optional: true + + '@esbuild/darwin-x64@0.21.5': + optional: true + + '@esbuild/freebsd-arm64@0.21.5': + optional: true + + '@esbuild/freebsd-x64@0.21.5': + optional: true + + '@esbuild/linux-arm64@0.21.5': + optional: true + + '@esbuild/linux-arm@0.21.5': + optional: true + + '@esbuild/linux-ia32@0.21.5': + optional: true + + '@esbuild/linux-loong64@0.21.5': + optional: true + + '@esbuild/linux-mips64el@0.21.5': + optional: true + + '@esbuild/linux-ppc64@0.21.5': + optional: true + + '@esbuild/linux-riscv64@0.21.5': + optional: true + + '@esbuild/linux-s390x@0.21.5': + optional: true + + '@esbuild/linux-x64@0.21.5': + optional: true + + '@esbuild/netbsd-x64@0.21.5': + optional: true + + '@esbuild/openbsd-x64@0.21.5': + optional: true + + '@esbuild/sunos-x64@0.21.5': + optional: true + + '@esbuild/win32-arm64@0.21.5': + optional: true + + '@esbuild/win32-ia32@0.21.5': + optional: true + + '@esbuild/win32-x64@0.21.5': + optional: true + + '@isaacs/cliui@8.0.2': + dependencies: + string-width: 5.1.2 + string-width-cjs: string-width@4.2.3 + strip-ansi: 7.1.0 + strip-ansi-cjs: strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: wrap-ansi@7.0.0 + + '@jridgewell/sourcemap-codec@1.4.15': {} + + '@jridgewell/sourcemap-codec@1.5.0': {} + + '@mermaid-js/mermaid-mindmap@9.3.0': + dependencies: + '@braintree/sanitize-url': 6.0.4 + cytoscape: 3.30.3 + cytoscape-cose-bilkent: 4.1.0(cytoscape@3.30.3) + cytoscape-fcose: 2.2.0(cytoscape@3.30.3) + d3: 7.9.0 + khroma: 2.1.0 + non-layered-tidy-tree-layout: 2.0.2 + optional: true + + '@nodelib/fs.scandir@2.1.5': + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + + '@nodelib/fs.stat@2.0.5': {} + + '@nodelib/fs.walk@1.2.8': + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.17.1 + + '@pkgjs/parseargs@0.11.0': + optional: true + + '@playwright/test@1.44.0': + dependencies: + playwright: 1.44.0 + + '@rollup/rollup-android-arm-eabi@4.22.4': + optional: true + + '@rollup/rollup-android-arm64@4.22.4': + optional: true + + '@rollup/rollup-darwin-arm64@4.22.4': + optional: true + + '@rollup/rollup-darwin-x64@4.22.4': + optional: true + + '@rollup/rollup-linux-arm-gnueabihf@4.22.4': + optional: true + + '@rollup/rollup-linux-arm-musleabihf@4.22.4': + optional: true + + '@rollup/rollup-linux-arm64-gnu@4.22.4': + optional: true + + '@rollup/rollup-linux-arm64-musl@4.22.4': + optional: true + + '@rollup/rollup-linux-powerpc64le-gnu@4.22.4': + optional: true + + '@rollup/rollup-linux-riscv64-gnu@4.22.4': + optional: true + + '@rollup/rollup-linux-s390x-gnu@4.22.4': + optional: true + + '@rollup/rollup-linux-x64-gnu@4.22.4': + optional: true + + '@rollup/rollup-linux-x64-musl@4.22.4': + optional: true + + '@rollup/rollup-win32-arm64-msvc@4.22.4': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.22.4': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.22.4': + optional: true + + '@shikijs/core@1.18.0': + dependencies: + '@shikijs/engine-javascript': 1.18.0 + '@shikijs/engine-oniguruma': 1.18.0 + '@shikijs/types': 1.18.0 + '@shikijs/vscode-textmate': 9.2.2 + '@types/hast': 3.0.4 + hast-util-to-html: 9.0.3 + + '@shikijs/engine-javascript@1.18.0': + dependencies: + '@shikijs/types': 1.18.0 + '@shikijs/vscode-textmate': 9.2.2 + oniguruma-to-js: 0.4.3 + + 
'@shikijs/engine-oniguruma@1.18.0': + dependencies: + '@shikijs/types': 1.18.0 + '@shikijs/vscode-textmate': 9.2.2 + + '@shikijs/transformers@1.18.0': + dependencies: + shiki: 1.18.0 + + '@shikijs/types@1.18.0': + dependencies: + '@shikijs/vscode-textmate': 9.2.2 + '@types/hast': 3.0.4 + + '@shikijs/vscode-textmate@9.2.2': {} + + '@types/d3-scale-chromatic@3.0.3': {} + + '@types/d3-scale@4.0.8': + dependencies: + '@types/d3-time': 3.0.3 + + '@types/d3-time@3.0.3': {} + + '@types/debug@4.1.12': + dependencies: + '@types/ms': 0.7.34 + + '@types/estree@1.0.5': {} + + '@types/hast@3.0.4': + dependencies: + '@types/unist': 3.0.3 + + '@types/linkify-it@5.0.0': {} + + '@types/markdown-it@14.1.2': + dependencies: + '@types/linkify-it': 5.0.0 + '@types/mdurl': 2.0.0 + + '@types/mdast@3.0.15': + dependencies: + '@types/unist': 2.0.11 + + '@types/mdast@4.0.4': + dependencies: + '@types/unist': 3.0.3 + + '@types/mdurl@2.0.0': {} + + '@types/ms@0.7.34': {} + + '@types/node@17.0.45': {} + + '@types/node@20.12.12': + dependencies: + undici-types: 5.26.5 + + '@types/sax@1.2.7': + dependencies: + '@types/node': 20.12.12 + + '@types/unist@2.0.11': {} + + '@types/unist@3.0.3': {} + + '@types/web-bluetooth@0.0.20': {} + + '@ungap/structured-clone@1.2.0': {} + + '@vitejs/plugin-vue@5.1.4(vite@5.4.8(@types/node@20.12.12)(sass@1.77.2))(vue@3.5.8)': + dependencies: + vite: 5.4.8(@types/node@20.12.12)(sass@1.77.2) + vue: 3.5.8 + + '@vue/compiler-core@3.4.27': + dependencies: + '@babel/parser': 7.24.5 + '@vue/shared': 3.4.27 + entities: 4.5.0 + estree-walker: 2.0.2 + source-map-js: 1.2.1 - /@vuepress/utils@2.0.0-rc.0: - resolution: {integrity: sha512-Q1ay/woClDHcW0Qe91KsnHoupdNN0tp/vhjvVLuAYxlv/1Obii7hz9WFcajyyGEhmsYxdvG2sGmcxFA02tuKkw==} + '@vue/compiler-core@3.5.8': dependencies: - '@types/debug': 4.1.12 - '@types/fs-extra': 11.0.4 - '@types/hash-sum': 1.0.2 - '@vuepress/shared': 2.0.0-rc.0 - debug: 4.3.4 - fs-extra: 11.2.0 - globby: 14.0.0 - hash-sum: 2.0.0 - ora: 7.0.1 - picocolors: 1.0.0 - upath: 2.0.1 - transitivePeerDependencies: - - supports-color - dev: true + '@babel/parser': 7.25.6 + '@vue/shared': 3.5.8 + entities: 4.5.0 + estree-walker: 2.0.2 + source-map-js: 1.2.1 + + '@vue/compiler-dom@3.4.27': + dependencies: + '@vue/compiler-core': 3.4.27 + '@vue/shared': 3.4.27 + + '@vue/compiler-dom@3.5.8': + dependencies: + '@vue/compiler-core': 3.5.8 + '@vue/shared': 3.5.8 + + '@vue/compiler-sfc@3.4.27': + dependencies: + '@babel/parser': 7.24.5 + '@vue/compiler-core': 3.4.27 + '@vue/compiler-dom': 3.4.27 + '@vue/compiler-ssr': 3.4.27 + '@vue/shared': 3.4.27 + estree-walker: 2.0.2 + magic-string: 0.30.10 + postcss: 8.4.47 + source-map-js: 1.2.0 + + '@vue/compiler-sfc@3.5.8': + dependencies: + '@babel/parser': 7.25.6 + '@vue/compiler-core': 3.5.8 + '@vue/compiler-dom': 3.5.8 + '@vue/compiler-ssr': 3.5.8 + '@vue/shared': 3.5.8 + estree-walker: 2.0.2 + magic-string: 0.30.11 + postcss: 8.4.47 + source-map-js: 1.2.1 + + '@vue/compiler-ssr@3.4.27': + dependencies: + '@vue/compiler-dom': 3.4.27 + '@vue/shared': 3.4.27 + + '@vue/compiler-ssr@3.5.8': + dependencies: + '@vue/compiler-dom': 3.5.8 + '@vue/shared': 3.5.8 + + '@vue/devtools-api@7.4.5': + dependencies: + '@vue/devtools-kit': 7.4.5 + + '@vue/devtools-kit@7.4.5': + dependencies: + '@vue/devtools-shared': 7.4.5 + birpc: 0.2.17 + hookable: 5.5.3 + mitt: 3.0.1 + perfect-debounce: 1.0.0 + speakingurl: 14.0.1 + superjson: 2.2.1 + + '@vue/devtools-shared@7.4.5': + dependencies: + rfdc: 1.4.1 + + '@vue/reactivity@3.4.27': + dependencies: + '@vue/shared': 3.4.27 + + 
'@vue/reactivity@3.5.8': + dependencies: + '@vue/shared': 3.5.8 + + '@vue/runtime-core@3.4.27': + dependencies: + '@vue/reactivity': 3.4.27 + '@vue/shared': 3.4.27 - /@vueuse/core@10.7.0(vue@3.3.11): - resolution: {integrity: sha512-4EUDESCHtwu44ZWK3Gc/hZUVhVo/ysvdtwocB5vcauSV4B7NiGY5972WnsojB3vRNdxvAt7kzJWE2h9h7C9d5w==} + '@vue/runtime-core@3.5.8': + dependencies: + '@vue/reactivity': 3.5.8 + '@vue/shared': 3.5.8 + + '@vue/runtime-dom@3.4.27': + dependencies: + '@vue/runtime-core': 3.4.27 + '@vue/shared': 3.4.27 + csstype: 3.1.3 + + '@vue/runtime-dom@3.5.8': + dependencies: + '@vue/reactivity': 3.5.8 + '@vue/runtime-core': 3.5.8 + '@vue/shared': 3.5.8 + csstype: 3.1.3 + + '@vue/server-renderer@3.4.27(vue@3.4.27)': + dependencies: + '@vue/compiler-ssr': 3.4.27 + '@vue/shared': 3.4.27 + vue: 3.4.27 + + '@vue/server-renderer@3.5.8(vue@3.5.8)': + dependencies: + '@vue/compiler-ssr': 3.5.8 + '@vue/shared': 3.5.8 + vue: 3.5.8 + + '@vue/shared@3.4.27': {} + + '@vue/shared@3.5.8': {} + + '@vueuse/core@10.9.0(vue@3.4.27)': dependencies: '@types/web-bluetooth': 0.0.20 - '@vueuse/metadata': 10.7.0 - '@vueuse/shared': 10.7.0(vue@3.3.11) - vue-demi: 0.14.6(vue@3.3.11) + '@vueuse/metadata': 10.9.0 + '@vueuse/shared': 10.9.0(vue@3.4.27) + vue-demi: 0.14.7(vue@3.4.27) transitivePeerDependencies: - '@vue/composition-api' - vue - dev: true - /@vueuse/metadata@10.7.0: - resolution: {integrity: sha512-GlaH7tKP2iBCZ3bHNZ6b0cl9g0CJK8lttkBNUX156gWvNYhTKEtbweWLm9rxCPIiwzYcr/5xML6T8ZUEt+DkvA==} - dev: true + '@vueuse/core@11.1.0(vue@3.5.8)': + dependencies: + '@types/web-bluetooth': 0.0.20 + '@vueuse/metadata': 11.1.0 + '@vueuse/shared': 11.1.0(vue@3.5.8) + vue-demi: 0.14.10(vue@3.5.8) + transitivePeerDependencies: + - '@vue/composition-api' + - vue - /@vueuse/shared@10.7.0(vue@3.3.11): - resolution: {integrity: sha512-kc00uV6CiaTdc3i1CDC4a3lBxzaBE9AgYNtFN87B5OOscqeWElj/uza8qVDmk7/U8JbqoONLbtqiLJ5LGRuqlw==} + '@vueuse/integrations@11.1.0(focus-trap@7.5.4)(vue@3.5.8)': dependencies: - vue-demi: 0.14.6(vue@3.3.11) + '@vueuse/core': 11.1.0(vue@3.5.8) + '@vueuse/shared': 11.1.0(vue@3.5.8) + vue-demi: 0.14.10(vue@3.5.8) + optionalDependencies: + focus-trap: 7.5.4 transitivePeerDependencies: - '@vue/composition-api' - vue - dev: true - - /algoliasearch@4.21.1: - resolution: {integrity: sha512-Ym0MGwOcjQhZ+s1N/j0o94g3vQD0MzNpWsfJLyPVCt0zHflbi0DwYX+9GPmTJ4BzegoxWMyCPgcmpd3R+VlOzQ==} - dependencies: - '@algolia/cache-browser-local-storage': 4.21.1 - '@algolia/cache-common': 4.21.1 - '@algolia/cache-in-memory': 4.21.1 - '@algolia/client-account': 4.21.1 - '@algolia/client-analytics': 4.21.1 - '@algolia/client-common': 4.21.1 - '@algolia/client-personalization': 4.21.1 - '@algolia/client-search': 4.21.1 - '@algolia/logger-common': 4.21.1 - '@algolia/logger-console': 4.21.1 - '@algolia/requester-browser-xhr': 4.21.1 - '@algolia/requester-common': 4.21.1 - '@algolia/requester-node-http': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true - - /ansi-regex@6.0.1: - resolution: {integrity: sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==} - engines: {node: '>=12'} - dev: true - /anymatch@3.1.3: - resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} - engines: {node: '>= 8'} + '@vueuse/metadata@10.9.0': {} + + '@vueuse/metadata@11.1.0': {} + + '@vueuse/shared@10.9.0(vue@3.4.27)': dependencies: - normalize-path: 3.0.0 - picomatch: 2.3.1 - dev: true + vue-demi: 0.14.7(vue@3.4.27) + transitivePeerDependencies: + - 
'@vue/composition-api' + - vue - /arg@5.0.2: - resolution: {integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==} - dev: true + '@vueuse/shared@11.1.0(vue@3.5.8)': + dependencies: + vue-demi: 0.14.10(vue@3.5.8) + transitivePeerDependencies: + - '@vue/composition-api' + - vue - /argparse@1.0.10: - resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} + algoliasearch@4.23.3: dependencies: - sprintf-js: 1.0.3 - dev: true + '@algolia/cache-browser-local-storage': 4.23.3 + '@algolia/cache-common': 4.23.3 + '@algolia/cache-in-memory': 4.23.3 + '@algolia/client-account': 4.23.3 + '@algolia/client-analytics': 4.23.3 + '@algolia/client-common': 4.23.3 + '@algolia/client-personalization': 4.23.3 + '@algolia/client-search': 4.23.3 + '@algolia/logger-common': 4.23.3 + '@algolia/logger-console': 4.23.3 + '@algolia/recommend': 4.23.3 + '@algolia/requester-browser-xhr': 4.23.3 + '@algolia/requester-common': 4.23.3 + '@algolia/requester-node-http': 4.23.3 + '@algolia/transporter': 4.23.3 - /argparse@2.0.1: - resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} - dev: true + ansi-regex@5.0.1: {} - /autoprefixer@10.4.16(postcss@8.4.32): - resolution: {integrity: sha512-7vd3UC6xKp0HLfua5IjZlcXvGAGy7cBAXTg2lyQ/8WpNhd6SiZ8Be+xm3FyBSYJx5GKcpRCzBh7RH4/0dnY+uQ==} - engines: {node: ^10 || ^12 || >=14} - hasBin: true - peerDependencies: - postcss: ^8.1.0 - dependencies: - browserslist: 4.22.2 - caniuse-lite: 1.0.30001568 - fraction.js: 4.3.7 - normalize-range: 0.1.2 - picocolors: 1.0.0 - postcss: 8.4.32 - postcss-value-parser: 4.2.0 - dev: true - - /base64-js@1.5.1: - resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} - dev: true - - /binary-extensions@2.2.0: - resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} - engines: {node: '>=8'} - dev: true + ansi-regex@6.0.1: {} - /bl@5.1.0: - resolution: {integrity: sha512-tv1ZJHLfTDnXE6tMHv73YgSJaWR2AFuPwMntBe7XL/GBFHnT0CLnsHMogfk5+GzCDC5ZWarSCYaIGATZt9dNsQ==} + ansi-styles@4.3.0: dependencies: - buffer: 6.0.3 - inherits: 2.0.4 - readable-stream: 3.6.2 - dev: true + color-convert: 2.0.1 - /boolbase@1.0.0: - resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} - dev: true + ansi-styles@6.2.1: {} - /braces@3.0.2: - resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} - engines: {node: '>=8'} + anymatch@3.1.3: dependencies: - fill-range: 7.0.1 - dev: true + normalize-path: 3.0.0 + picomatch: 2.3.1 + optional: true - /browserslist@4.22.2: - resolution: {integrity: sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A==} - engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} - hasBin: true - dependencies: - caniuse-lite: 1.0.30001568 - electron-to-chromium: 1.4.611 - node-releases: 2.0.14 - update-browserslist-db: 1.0.13(browserslist@4.22.2) - dev: true + arg@5.0.2: {} + + argparse@2.0.1: {} - /buffer@6.0.3: - resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} + balanced-match@1.0.2: {} + + binary-extensions@2.3.0: + optional: true + + birpc@0.2.17: {} + + brace-expansion@2.0.1: dependencies: - 
base64-js: 1.5.1 - ieee754: 1.2.1 - dev: true + balanced-match: 1.0.2 - /cac@6.7.14: - resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} - engines: {node: '>=8'} - dev: true - - /caniuse-lite@1.0.30001568: - resolution: {integrity: sha512-vSUkH84HontZJ88MiNrOau1EBrCqEQYgkC5gIySiDlpsm8sGVrhU7Kx4V6h0tnqaHzIHZv08HlJIwPbL4XL9+A==} - dev: true - - /chalk@5.3.0: - resolution: {integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==} - engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} - dev: true - - /cheerio-select@2.1.0: - resolution: {integrity: sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==} - dependencies: - boolbase: 1.0.0 - css-select: 5.1.0 - css-what: 6.1.0 - domelementtype: 2.3.0 - domhandler: 5.0.3 - domutils: 3.1.0 - dev: true - - /cheerio@1.0.0-rc.12: - resolution: {integrity: sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==} - engines: {node: '>= 6'} + braces@3.0.3: dependencies: - cheerio-select: 2.1.0 - dom-serializer: 2.0.0 - domhandler: 5.0.3 - domutils: 3.1.0 - htmlparser2: 8.0.2 - parse5: 7.1.2 - parse5-htmlparser2-tree-adapter: 7.0.0 - dev: true - - /chokidar@3.5.3: - resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==} - engines: {node: '>= 8.10.0'} + fill-range: 7.1.1 + + ccount@2.0.1: {} + + character-entities-html4@2.1.0: {} + + character-entities-legacy@3.0.0: {} + + character-entities@2.0.2: {} + + chokidar@3.6.0: dependencies: anymatch: 3.1.3 - braces: 3.0.2 + braces: 3.0.3 glob-parent: 5.1.2 is-binary-path: 2.1.0 is-glob: 4.0.3 @@ -1307,1062 +2255,1061 @@ packages: readdirp: 3.6.0 optionalDependencies: fsevents: 2.3.3 - dev: true + optional: true - /cli-cursor@4.0.0: - resolution: {integrity: sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + color-convert@2.0.1: dependencies: - restore-cursor: 4.0.0 - dev: true + color-name: 1.1.4 - /cli-spinners@2.9.2: - resolution: {integrity: sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==} - engines: {node: '>=6'} - dev: true + color-name@1.1.4: {} - /connect-history-api-fallback@2.0.0: - resolution: {integrity: sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==} - engines: {node: '>=0.8'} - dev: true + comma-separated-tokens@2.0.3: {} - /cross-spawn@7.0.3: - resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} - engines: {node: '>= 8'} + commander@12.0.0: {} + + commander@7.2.0: {} + + commander@8.3.0: {} + + copy-anything@3.0.5: + dependencies: + is-what: 4.1.16 + + cose-base@1.0.3: + dependencies: + layout-base: 1.0.2 + + cose-base@2.2.0: + dependencies: + layout-base: 2.0.1 + optional: true + + cross-spawn@7.0.3: dependencies: path-key: 3.1.1 shebang-command: 2.0.0 which: 2.0.2 - dev: true - /css-select@5.1.0: - resolution: {integrity: sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==} + csstype@3.1.3: {} + + cytoscape-cose-bilkent@4.1.0(cytoscape@3.30.3): dependencies: - boolbase: 1.0.0 - css-what: 6.1.0 - domhandler: 5.0.3 - domutils: 3.1.0 - nth-check: 2.1.1 - dev: true + cose-base: 1.0.3 + cytoscape: 3.30.3 - /css-what@6.1.0: - resolution: 
{integrity: sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==} - engines: {node: '>= 6'} - dev: true + cytoscape-fcose@2.2.0(cytoscape@3.30.3): + dependencies: + cose-base: 2.2.0 + cytoscape: 3.30.3 + optional: true - /csstype@3.1.3: - resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==} - dev: true + cytoscape@3.30.3: {} + + d3-array@2.12.1: + dependencies: + internmap: 1.0.1 - /dayjs@1.11.10: - resolution: {integrity: sha512-vjAczensTgRcqDERK0SR2XMwsF/tSvnvlv6VcF2GIhg6Sx4yOIt/irsr1RDJsKiIyBzJDpCoXiWWq28MqH2cnQ==} - dev: true + d3-array@3.2.4: + dependencies: + internmap: 2.0.3 - /debug@4.3.4: - resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} - engines: {node: '>=6.0'} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true + d3-axis@3.0.0: {} + + d3-brush@3.0.0: dependencies: - ms: 2.1.2 - dev: true + d3-dispatch: 3.0.1 + d3-drag: 3.0.0 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-transition: 3.0.1(d3-selection@3.0.0) - /dom-serializer@2.0.0: - resolution: {integrity: sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==} + d3-chord@3.0.1: dependencies: - domelementtype: 2.3.0 - domhandler: 5.0.3 - entities: 4.5.0 - dev: true + d3-path: 3.1.0 - /domelementtype@2.3.0: - resolution: {integrity: sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==} - dev: true + d3-color@3.1.0: {} - /domhandler@5.0.3: - resolution: {integrity: sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==} - engines: {node: '>= 4'} + d3-contour@4.0.2: dependencies: - domelementtype: 2.3.0 - dev: true + d3-array: 3.2.4 - /domutils@3.1.0: - resolution: {integrity: sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==} + d3-delaunay@6.0.4: dependencies: - dom-serializer: 2.0.0 - domelementtype: 2.3.0 - domhandler: 5.0.3 - dev: true + delaunator: 5.0.1 - /eastasianwidth@0.2.0: - resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} - dev: true + d3-dispatch@3.0.1: {} - /electron-to-chromium@1.4.611: - resolution: {integrity: sha512-ZtRpDxrjHapOwxtv+nuth5ByB8clyn8crVynmRNGO3wG3LOp8RTcyZDqwaI6Ng6y8FCK2hVZmJoqwCskKbNMaw==} - dev: true + d3-drag@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-selection: 3.0.0 - /emoji-regex@10.3.0: - resolution: {integrity: sha512-QpLs9D9v9kArv4lfDEgg1X/gN5XLnf/A6l9cs8SPZLRZR3ZkY9+kwIQTxm+fsSej5UMYGE8fdoaZVIBlqG0XTw==} - dev: true + d3-dsv@3.0.1: + dependencies: + commander: 7.2.0 + iconv-lite: 0.6.3 + rw: 1.3.3 - /entities@3.0.1: - resolution: {integrity: sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q==} - engines: {node: '>=0.12'} - dev: true + d3-ease@3.0.1: {} - /entities@4.5.0: - resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==} - engines: {node: '>=0.12'} - dev: true + d3-fetch@3.0.1: + dependencies: + d3-dsv: 3.0.1 - /envinfo@7.11.0: - resolution: {integrity: sha512-G9/6xF1FPbIw0TtalAMaVPpiq2aDEuKLXM314jPVAO9r2fo2a4BLqMNkmRS7O/xPPZ+COAhGIz3ETvHEV3eUcg==} - engines: {node: '>=4'} - hasBin: true - dev: true + d3-force@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-quadtree: 3.0.1 + d3-timer: 3.0.1 - 
/esbuild@0.19.9: - resolution: {integrity: sha512-U9CHtKSy+EpPsEBa+/A2gMs/h3ylBC0H0KSqIg7tpztHerLi6nrrcoUJAkNCEPumx8yJ+Byic4BVwHgRbN0TBg==} - engines: {node: '>=12'} - hasBin: true - requiresBuild: true - optionalDependencies: - '@esbuild/android-arm': 0.19.9 - '@esbuild/android-arm64': 0.19.9 - '@esbuild/android-x64': 0.19.9 - '@esbuild/darwin-arm64': 0.19.9 - '@esbuild/darwin-x64': 0.19.9 - '@esbuild/freebsd-arm64': 0.19.9 - '@esbuild/freebsd-x64': 0.19.9 - '@esbuild/linux-arm': 0.19.9 - '@esbuild/linux-arm64': 0.19.9 - '@esbuild/linux-ia32': 0.19.9 - '@esbuild/linux-loong64': 0.19.9 - '@esbuild/linux-mips64el': 0.19.9 - '@esbuild/linux-ppc64': 0.19.9 - '@esbuild/linux-riscv64': 0.19.9 - '@esbuild/linux-s390x': 0.19.9 - '@esbuild/linux-x64': 0.19.9 - '@esbuild/netbsd-x64': 0.19.9 - '@esbuild/openbsd-x64': 0.19.9 - '@esbuild/sunos-x64': 0.19.9 - '@esbuild/win32-arm64': 0.19.9 - '@esbuild/win32-ia32': 0.19.9 - '@esbuild/win32-x64': 0.19.9 - dev: true - - /escalade@3.1.1: - resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==} - engines: {node: '>=6'} - dev: true + d3-format@3.1.0: {} - /esprima@4.0.1: - resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} - engines: {node: '>=4'} - hasBin: true - dev: true + d3-geo@3.1.1: + dependencies: + d3-array: 3.2.4 - /estree-walker@2.0.2: - resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} - dev: true + d3-hierarchy@3.1.2: {} - /execa@8.0.1: - resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==} - engines: {node: '>=16.17'} + d3-interpolate@3.0.1: dependencies: - cross-spawn: 7.0.3 - get-stream: 8.0.1 - human-signals: 5.0.0 - is-stream: 3.0.0 - merge-stream: 2.0.0 - npm-run-path: 5.1.0 - onetime: 6.0.0 - signal-exit: 4.1.0 - strip-final-newline: 3.0.0 - dev: true + d3-color: 3.1.0 - /extend-shallow@2.0.1: - resolution: {integrity: sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==} - engines: {node: '>=0.10.0'} + d3-path@1.0.9: {} + + d3-path@3.1.0: {} + + d3-polygon@3.0.1: {} + + d3-quadtree@3.0.1: {} + + d3-random@3.0.1: {} + + d3-sankey@0.12.3: dependencies: - is-extendable: 0.1.1 - dev: true + d3-array: 2.12.1 + d3-shape: 1.3.7 - /fast-glob@3.3.2: - resolution: {integrity: sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==} - engines: {node: '>=8.6.0'} + d3-scale-chromatic@3.1.0: + dependencies: + d3-color: 3.1.0 + d3-interpolate: 3.0.1 + + d3-scale@4.0.2: + dependencies: + d3-array: 3.2.4 + d3-format: 3.1.0 + d3-interpolate: 3.0.1 + d3-time: 3.1.0 + d3-time-format: 4.1.0 + + d3-selection@3.0.0: {} + + d3-shape@1.3.7: + dependencies: + d3-path: 1.0.9 + + d3-shape@3.2.0: + dependencies: + d3-path: 3.1.0 + + d3-time-format@4.1.0: + dependencies: + d3-time: 3.1.0 + + d3-time@3.1.0: + dependencies: + d3-array: 3.2.4 + + d3-timer@3.0.1: {} + + d3-transition@3.0.1(d3-selection@3.0.0): + dependencies: + d3-color: 3.1.0 + d3-dispatch: 3.0.1 + d3-ease: 3.0.1 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-timer: 3.0.1 + + d3-zoom@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-drag: 3.0.0 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-transition: 3.0.1(d3-selection@3.0.0) + + d3@7.9.0: + dependencies: + d3-array: 3.2.4 + d3-axis: 3.0.0 + d3-brush: 3.0.0 + d3-chord: 3.0.1 + d3-color: 3.1.0 
+ d3-contour: 4.0.2 + d3-delaunay: 6.0.4 + d3-dispatch: 3.0.1 + d3-drag: 3.0.0 + d3-dsv: 3.0.1 + d3-ease: 3.0.1 + d3-fetch: 3.0.1 + d3-force: 3.0.0 + d3-format: 3.1.0 + d3-geo: 3.1.1 + d3-hierarchy: 3.1.2 + d3-interpolate: 3.0.1 + d3-path: 3.1.0 + d3-polygon: 3.0.1 + d3-quadtree: 3.0.1 + d3-random: 3.0.1 + d3-scale: 4.0.2 + d3-scale-chromatic: 3.1.0 + d3-selection: 3.0.0 + d3-shape: 3.2.0 + d3-time: 3.1.0 + d3-time-format: 4.1.0 + d3-timer: 3.0.1 + d3-transition: 3.0.1(d3-selection@3.0.0) + d3-zoom: 3.0.0 + + dagre-d3-es@7.0.10: + dependencies: + d3: 7.9.0 + lodash-es: 4.17.21 + + dayjs@1.11.13: {} + + debug@4.3.7: + dependencies: + ms: 2.1.3 + + decode-named-character-reference@1.0.2: + dependencies: + character-entities: 2.0.2 + + deep-extend@0.6.0: {} + + delaunator@5.0.1: + dependencies: + robust-predicates: 3.0.2 + + dequal@2.0.3: {} + + devlop@1.1.0: + dependencies: + dequal: 2.0.3 + + diff@5.2.0: {} + + dompurify@3.1.6: {} + + eastasianwidth@0.2.0: {} + + elkjs@0.9.3: {} + + emoji-regex@8.0.0: {} + + emoji-regex@9.2.2: {} + + entities@4.5.0: {} + + esbuild@0.21.5: + optionalDependencies: + '@esbuild/aix-ppc64': 0.21.5 + '@esbuild/android-arm': 0.21.5 + '@esbuild/android-arm64': 0.21.5 + '@esbuild/android-x64': 0.21.5 + '@esbuild/darwin-arm64': 0.21.5 + '@esbuild/darwin-x64': 0.21.5 + '@esbuild/freebsd-arm64': 0.21.5 + '@esbuild/freebsd-x64': 0.21.5 + '@esbuild/linux-arm': 0.21.5 + '@esbuild/linux-arm64': 0.21.5 + '@esbuild/linux-ia32': 0.21.5 + '@esbuild/linux-loong64': 0.21.5 + '@esbuild/linux-mips64el': 0.21.5 + '@esbuild/linux-ppc64': 0.21.5 + '@esbuild/linux-riscv64': 0.21.5 + '@esbuild/linux-s390x': 0.21.5 + '@esbuild/linux-x64': 0.21.5 + '@esbuild/netbsd-x64': 0.21.5 + '@esbuild/openbsd-x64': 0.21.5 + '@esbuild/sunos-x64': 0.21.5 + '@esbuild/win32-arm64': 0.21.5 + '@esbuild/win32-ia32': 0.21.5 + '@esbuild/win32-x64': 0.21.5 + + estree-walker@2.0.2: {} + + fast-glob@3.3.2: dependencies: '@nodelib/fs.stat': 2.0.5 '@nodelib/fs.walk': 1.2.8 glob-parent: 5.1.2 merge2: 1.4.1 - micromatch: 4.0.5 - dev: true + micromatch: 4.0.8 - /fastq@1.15.0: - resolution: {integrity: sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==} + fastq@1.17.1: dependencies: reusify: 1.0.4 - dev: true - /fflate@0.8.1: - resolution: {integrity: sha512-/exOvEuc+/iaUm105QIiOt4LpBdMTWsXxqR0HDF35vx3fmaKzw7354gTilCh5rkzEt8WYyG//ku3h3nRmd7CHQ==} - dev: true - - /fill-range@7.0.1: - resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} - engines: {node: '>=8'} + fill-range@7.1.1: dependencies: to-regex-range: 5.0.1 - dev: true - /fraction.js@4.3.7: - resolution: {integrity: sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==} - dev: true + focus-trap@7.5.4: + dependencies: + tabbable: 6.2.0 - /fs-extra@11.2.0: - resolution: {integrity: sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==} - engines: {node: '>=14.14'} + foreground-child@3.1.1: dependencies: - graceful-fs: 4.2.11 - jsonfile: 6.1.0 - universalify: 2.0.1 - dev: true + cross-spawn: 7.0.3 + signal-exit: 4.1.0 - /fsevents@2.3.3: - resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} - engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} - os: [darwin] - requiresBuild: true - dev: true + fsevents@2.3.2: optional: true - /get-stream@8.0.1: - resolution: {integrity: 
sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==} - engines: {node: '>=16'} - dev: true + fsevents@2.3.3: + optional: true - /glob-parent@5.1.2: - resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} - engines: {node: '>= 6'} + get-stdin@9.0.0: {} + + glob-parent@5.1.2: dependencies: is-glob: 4.0.3 - dev: true - /globby@14.0.0: - resolution: {integrity: sha512-/1WM/LNHRAOH9lZta77uGbq0dAEQM+XjNesWwhlERDVenqothRbnzTrL3/LrIoEPPjeUHC3vrS6TwoyxeHs7MQ==} - engines: {node: '>=18'} + glob@10.3.16: dependencies: - '@sindresorhus/merge-streams': 1.0.0 - fast-glob: 3.3.2 - ignore: 5.3.0 - path-type: 5.0.0 - slash: 5.1.0 - unicorn-magic: 0.1.0 - dev: true - - /graceful-fs@4.2.11: - resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} - dev: true - - /gray-matter@4.0.3: - resolution: {integrity: sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==} - engines: {node: '>=6.0'} + foreground-child: 3.1.1 + jackspeak: 3.1.2 + minimatch: 9.0.4 + minipass: 7.1.1 + path-scurry: 1.11.1 + + hast-util-to-html@9.0.3: + dependencies: + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + ccount: 2.0.1 + comma-separated-tokens: 2.0.3 + hast-util-whitespace: 3.0.0 + html-void-elements: 3.0.0 + mdast-util-to-hast: 13.2.0 + property-information: 6.5.0 + space-separated-tokens: 2.0.2 + stringify-entities: 4.0.4 + zwitch: 2.0.4 + + hast-util-whitespace@3.0.0: + dependencies: + '@types/hast': 3.0.4 + + hookable@5.5.3: {} + + html-void-elements@3.0.0: {} + + iconv-lite@0.6.3: + dependencies: + safer-buffer: 2.1.2 + + ignore@5.3.1: {} + + immutable@4.3.7: + optional: true + + ini@4.1.3: {} + + internmap@1.0.1: {} + + internmap@2.0.3: {} + + is-binary-path@2.1.0: + dependencies: + binary-extensions: 2.3.0 + optional: true + + is-extglob@2.1.1: {} + + is-fullwidth-code-point@3.0.0: {} + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-number@7.0.0: {} + + is-what@4.1.16: {} + + isexe@2.0.0: {} + + jackspeak@3.1.2: + dependencies: + '@isaacs/cliui': 8.0.2 + optionalDependencies: + '@pkgjs/parseargs': 0.11.0 + + js-yaml@4.1.0: + dependencies: + argparse: 2.0.1 + + jsonc-parser@3.2.1: {} + + jsonpointer@5.0.1: {} + + katex@0.16.11: + dependencies: + commander: 8.3.0 + + khroma@2.1.0: {} + + kleur@4.1.5: {} + + layout-base@1.0.2: {} + + layout-base@2.0.1: + optional: true + + linkify-it@5.0.0: + dependencies: + uc.micro: 2.1.0 + + lodash-es@4.17.21: {} + + lru-cache@10.2.2: {} + + magic-string@0.30.10: + dependencies: + '@jridgewell/sourcemap-codec': 1.4.15 + + magic-string@0.30.11: dependencies: - js-yaml: 3.14.1 - kind-of: 6.0.3 - section-matter: 1.0.0 - strip-bom-string: 1.0.0 - dev: true + '@jridgewell/sourcemap-codec': 1.5.0 - /hash-sum@2.0.0: - resolution: {integrity: sha512-WdZTbAByD+pHfl/g9QSsBIIwy8IT+EsPiKDs0KNX+zSHhdDLFKdZu0BQHljvO+0QI/BasbMSUa8wYNCZTvhslg==} - dev: true + mark.js@8.11.1: {} - /htmlparser2@8.0.2: - resolution: {integrity: sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==} + markdown-it-footnote@4.0.0: {} + + markdown-it@14.1.0: dependencies: - domelementtype: 2.3.0 - domhandler: 5.0.3 - domutils: 3.1.0 + argparse: 2.0.1 entities: 4.5.0 - dev: true - - /human-signals@5.0.0: - resolution: {integrity: sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==} - engines: {node: '>=16.17.0'} - dev: 
true - - /ieee754@1.2.1: - resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} - dev: true + linkify-it: 5.0.0 + mdurl: 2.0.0 + punycode.js: 2.3.1 + uc.micro: 2.1.0 + + markdownlint-cli@0.40.0: + dependencies: + commander: 12.0.0 + get-stdin: 9.0.0 + glob: 10.3.16 + ignore: 5.3.1 + js-yaml: 4.1.0 + jsonc-parser: 3.2.1 + jsonpointer: 5.0.1 + markdownlint: 0.34.0 + minimatch: 9.0.4 + run-con: 1.3.2 + toml: 3.0.0 + + markdownlint-micromark@0.1.9: {} + + markdownlint@0.34.0: + dependencies: + markdown-it: 14.1.0 + markdownlint-micromark: 0.1.9 + + mdast-util-from-markdown@1.3.1: + dependencies: + '@types/mdast': 3.0.15 + '@types/unist': 2.0.11 + decode-named-character-reference: 1.0.2 + mdast-util-to-string: 3.2.0 + micromark: 3.2.0 + micromark-util-decode-numeric-character-reference: 1.1.0 + micromark-util-decode-string: 1.1.0 + micromark-util-normalize-identifier: 1.1.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 + unist-util-stringify-position: 3.0.3 + uvu: 0.5.6 + transitivePeerDependencies: + - supports-color - /ignore@5.3.0: - resolution: {integrity: sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg==} - engines: {node: '>= 4'} - dev: true + mdast-util-to-hast@13.2.0: + dependencies: + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + '@ungap/structured-clone': 1.2.0 + devlop: 1.1.0 + micromark-util-sanitize-uri: 2.0.0 + trim-lines: 3.0.1 + unist-util-position: 5.0.0 + unist-util-visit: 5.0.0 + vfile: 6.0.3 + + mdast-util-to-string@3.2.0: + dependencies: + '@types/mdast': 3.0.15 + + mdurl@2.0.0: {} + + merge2@1.4.1: {} + + mermaid@10.9.3: + dependencies: + '@braintree/sanitize-url': 6.0.4 + '@types/d3-scale': 4.0.8 + '@types/d3-scale-chromatic': 3.0.3 + cytoscape: 3.30.3 + cytoscape-cose-bilkent: 4.1.0(cytoscape@3.30.3) + d3: 7.9.0 + d3-sankey: 0.12.3 + dagre-d3-es: 7.0.10 + dayjs: 1.11.13 + dompurify: 3.1.6 + elkjs: 0.9.3 + katex: 0.16.11 + khroma: 2.1.0 + lodash-es: 4.17.21 + mdast-util-from-markdown: 1.3.1 + non-layered-tidy-tree-layout: 2.0.2 + stylis: 4.3.4 + ts-dedent: 2.2.0 + uuid: 9.0.1 + web-worker: 1.3.0 + transitivePeerDependencies: + - supports-color - /immutable@4.3.4: - resolution: {integrity: sha512-fsXeu4J4i6WNWSikpI88v/PcVflZz+6kMhUfIwc5SY+poQRPnaf5V7qds6SUyUN3cVxEzuCab7QIoLOQ+DQ1wA==} - dev: true + micromark-core-commonmark@1.1.0: + dependencies: + decode-named-character-reference: 1.0.2 + micromark-factory-destination: 1.1.0 + micromark-factory-label: 1.1.0 + micromark-factory-space: 1.1.0 + micromark-factory-title: 1.1.0 + micromark-factory-whitespace: 1.1.0 + micromark-util-character: 1.2.0 + micromark-util-chunked: 1.1.0 + micromark-util-classify-character: 1.1.0 + micromark-util-html-tag-name: 1.2.0 + micromark-util-normalize-identifier: 1.1.0 + micromark-util-resolve-all: 1.1.0 + micromark-util-subtokenize: 1.1.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 + uvu: 0.5.6 - /inherits@2.0.4: - resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} - dev: true + micromark-factory-destination@1.1.0: + dependencies: + micromark-util-character: 1.2.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 - /is-binary-path@2.1.0: - resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} - engines: {node: '>=8'} + micromark-factory-label@1.1.0: dependencies: - binary-extensions: 2.2.0 - dev: true + 
micromark-util-character: 1.2.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 + uvu: 0.5.6 - /is-extendable@0.1.1: - resolution: {integrity: sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==} - engines: {node: '>=0.10.0'} - dev: true + micromark-factory-space@1.1.0: + dependencies: + micromark-util-character: 1.2.0 + micromark-util-types: 1.1.0 - /is-extglob@2.1.1: - resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} - engines: {node: '>=0.10.0'} - dev: true + micromark-factory-title@1.1.0: + dependencies: + micromark-factory-space: 1.1.0 + micromark-util-character: 1.2.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 - /is-glob@4.0.3: - resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} - engines: {node: '>=0.10.0'} + micromark-factory-whitespace@1.1.0: dependencies: - is-extglob: 2.1.1 - dev: true + micromark-factory-space: 1.1.0 + micromark-util-character: 1.2.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 - /is-interactive@2.0.0: - resolution: {integrity: sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==} - engines: {node: '>=12'} - dev: true + micromark-util-character@1.2.0: + dependencies: + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 - /is-number@7.0.0: - resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} - engines: {node: '>=0.12.0'} - dev: true + micromark-util-character@2.1.0: + dependencies: + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 - /is-stream@3.0.0: - resolution: {integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dev: true + micromark-util-chunked@1.1.0: + dependencies: + micromark-util-symbol: 1.1.0 - /is-unicode-supported@1.3.0: - resolution: {integrity: sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==} - engines: {node: '>=12'} - dev: true + micromark-util-classify-character@1.1.0: + dependencies: + micromark-util-character: 1.2.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 - /isexe@2.0.0: - resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} - dev: true + micromark-util-combine-extensions@1.1.0: + dependencies: + micromark-util-chunked: 1.1.0 + micromark-util-types: 1.1.0 - /js-yaml@3.14.1: - resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} - hasBin: true + micromark-util-decode-numeric-character-reference@1.1.0: dependencies: - argparse: 1.0.10 - esprima: 4.0.1 - dev: true + micromark-util-symbol: 1.1.0 - /jsonfile@6.1.0: - resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} + micromark-util-decode-string@1.1.0: dependencies: - universalify: 2.0.1 - optionalDependencies: - graceful-fs: 4.2.11 - dev: true + decode-named-character-reference: 1.0.2 + micromark-util-character: 1.2.0 + micromark-util-decode-numeric-character-reference: 1.1.0 + micromark-util-symbol: 1.1.0 - /kind-of@6.0.3: - resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==} - engines: 
{node: '>=0.10.0'} - dev: true + micromark-util-encode@1.1.0: {} - /lilconfig@3.0.0: - resolution: {integrity: sha512-K2U4W2Ff5ibV7j7ydLr+zLAkIg5JJ4lPn1Ltsdt+Tz/IjQ8buJ55pZAxoP34lqIiwtF9iAvtLv3JGv7CAyAg+g==} - engines: {node: '>=14'} - dev: true + micromark-util-encode@2.0.0: {} - /linkify-it@4.0.1: - resolution: {integrity: sha512-C7bfi1UZmoj8+PQx22XyeXCuBlokoyWQL5pWSP+EI6nzRylyThouddufc2c1NDIcP9k5agmN9fLpA7VNJfIiqw==} - dependencies: - uc.micro: 1.0.6 - dev: true + micromark-util-html-tag-name@1.2.0: {} - /log-symbols@5.1.0: - resolution: {integrity: sha512-l0x2DvrW294C9uDCoQe1VSU4gf529FkSZ6leBl4TiqZH/e+0R7hSfHQBNut2mNygDgHwvYHfFLn6Oxb3VWj2rA==} - engines: {node: '>=12'} + micromark-util-normalize-identifier@1.1.0: dependencies: - chalk: 5.3.0 - is-unicode-supported: 1.3.0 - dev: true + micromark-util-symbol: 1.1.0 - /lru-cache@6.0.0: - resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==} - engines: {node: '>=10'} + micromark-util-resolve-all@1.1.0: dependencies: - yallist: 4.0.0 - dev: true + micromark-util-types: 1.1.0 - /magic-string@0.30.5: - resolution: {integrity: sha512-7xlpfBaQaP/T6Vh8MO/EqXSW5En6INHEvEXQiuff7Gku0PWjU3uf6w/j9o7O+SpB5fOAkrI5HeoNgwjEO0pFsA==} - engines: {node: '>=12'} + micromark-util-sanitize-uri@1.2.0: dependencies: - '@jridgewell/sourcemap-codec': 1.4.15 - dev: true + micromark-util-character: 1.2.0 + micromark-util-encode: 1.1.0 + micromark-util-symbol: 1.1.0 - /markdown-it-anchor@8.6.7(@types/markdown-it@13.0.7)(markdown-it@13.0.2): - resolution: {integrity: sha512-FlCHFwNnutLgVTflOYHPW2pPcl2AACqVzExlkGQNsi4CJgqOHN7YTgDd4LuhgN1BFO3TS0vLAruV1Td6dwWPJA==} - peerDependencies: - '@types/markdown-it': '*' - markdown-it: '*' + micromark-util-sanitize-uri@2.0.0: dependencies: - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true - - /markdown-it-container@3.0.0: - resolution: {integrity: sha512-y6oKTq4BB9OQuY/KLfk/O3ysFhB3IMYoIWhGJEidXt1NQFocFK2sA2t0NYZAMyMShAGL6x5OPIbrmXPIqaN9rw==} - dev: true + micromark-util-character: 2.1.0 + micromark-util-encode: 2.0.0 + micromark-util-symbol: 2.0.0 - /markdown-it-emoji@2.0.2: - resolution: {integrity: sha512-zLftSaNrKuYl0kR5zm4gxXjHaOI3FAOEaloKmRA5hijmJZvSjmxcokOLlzycb/HXlUFWzXqpIEoyEMCE4i9MvQ==} - dev: true - - /markdown-it@13.0.2: - resolution: {integrity: sha512-FtwnEuuK+2yVU7goGn/MJ0WBZMM9ZPgU9spqlFs7/A/pDIUNSOQZhUgOqYCficIuR2QaFnrt8LHqBWsbTAoI5w==} - hasBin: true + micromark-util-subtokenize@1.1.0: dependencies: - argparse: 2.0.1 - entities: 3.0.1 - linkify-it: 4.0.1 - mdurl: 1.0.1 - uc.micro: 1.0.6 - dev: true + micromark-util-chunked: 1.1.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 + uvu: 0.5.6 - /mdurl@1.0.1: - resolution: {integrity: sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==} - dev: true + micromark-util-symbol@1.1.0: {} - /medium-zoom@1.1.0: - resolution: {integrity: sha512-ewyDsp7k4InCUp3jRmwHBRFGyjBimKps/AJLjRSox+2q/2H4p/PNpQf+pwONWlJiOudkBXtbdmVbFjqyybfTmQ==} - dev: true + micromark-util-symbol@2.0.0: {} - /merge-stream@2.0.0: - resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} - dev: true + micromark-util-types@1.1.0: {} - /merge2@1.4.1: - resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} - engines: {node: '>= 8'} - dev: true + micromark-util-types@2.0.0: {} - /micromatch@4.0.5: - resolution: {integrity: 
sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} - engines: {node: '>=8.6'} + micromark@3.2.0: dependencies: - braces: 3.0.2 - picomatch: 2.3.1 - dev: true + '@types/debug': 4.1.12 + debug: 4.3.7 + decode-named-character-reference: 1.0.2 + micromark-core-commonmark: 1.1.0 + micromark-factory-space: 1.1.0 + micromark-util-character: 1.2.0 + micromark-util-chunked: 1.1.0 + micromark-util-combine-extensions: 1.1.0 + micromark-util-decode-numeric-character-reference: 1.1.0 + micromark-util-encode: 1.1.0 + micromark-util-normalize-identifier: 1.1.0 + micromark-util-resolve-all: 1.1.0 + micromark-util-sanitize-uri: 1.2.0 + micromark-util-subtokenize: 1.1.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 + uvu: 0.5.6 + transitivePeerDependencies: + - supports-color - /mimic-fn@2.1.0: - resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} - engines: {node: '>=6'} - dev: true + micromatch@4.0.8: + dependencies: + braces: 3.0.3 + picomatch: 2.3.1 - /mimic-fn@4.0.0: - resolution: {integrity: sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==} - engines: {node: '>=12'} - dev: true + minimatch@9.0.4: + dependencies: + brace-expansion: 2.0.1 - /ms@2.1.2: - resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} - dev: true + minimist@1.2.8: {} - /nanoid@3.3.7: - resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} - engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} - hasBin: true - dev: true + minipass@7.1.1: {} - /node-releases@2.0.14: - resolution: {integrity: sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==} - dev: true + minisearch@7.1.0: {} - /normalize-path@3.0.0: - resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} - engines: {node: '>=0.10.0'} - dev: true + mitt@3.0.1: {} - /normalize-range@0.1.2: - resolution: {integrity: sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==} - engines: {node: '>=0.10.0'} - dev: true + mri@1.2.0: {} - /npm-run-path@5.1.0: - resolution: {integrity: sha512-sJOdmRGrY2sjNTRMbSvluQqg+8X7ZK61yvzBEIDhz4f8z1TZFYABsqjjCBd/0PUNE9M6QDgHJXQkGUEm7Q+l9Q==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dependencies: - path-key: 4.0.0 - dev: true + ms@2.1.3: {} - /nth-check@2.1.1: - resolution: {integrity: sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==} - dependencies: - boolbase: 1.0.0 - dev: true + nanoid@3.3.7: {} - /onetime@5.1.2: - resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} - engines: {node: '>=6'} - dependencies: - mimic-fn: 2.1.0 - dev: true + non-layered-tidy-tree-layout@2.0.2: {} - /onetime@6.0.0: - resolution: {integrity: sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==} - engines: {node: '>=12'} - dependencies: - mimic-fn: 4.0.0 - dev: true + normalize-path@3.0.0: + optional: true - /ora@7.0.1: - resolution: {integrity: sha512-0TUxTiFJWv+JnjWm4o9yvuskpEJLXTcng8MJuKd+SzAzp2o+OP3HWqNhB4OdJRt1Vsd9/mR0oyaEYlOnL7XIRw==} - engines: {node: '>=16'} + oniguruma-to-js@0.4.3: dependencies: - chalk: 5.3.0 - cli-cursor: 4.0.0 - 
cli-spinners: 2.9.2 - is-interactive: 2.0.0 - is-unicode-supported: 1.3.0 - log-symbols: 5.1.0 - stdin-discarder: 0.1.0 - string-width: 6.1.0 - strip-ansi: 7.1.0 - dev: true + regex: 4.3.2 - /parse5-htmlparser2-tree-adapter@7.0.0: - resolution: {integrity: sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==} - dependencies: - domhandler: 5.0.3 - parse5: 7.1.2 - dev: true + path-key@3.1.1: {} - /parse5@7.1.2: - resolution: {integrity: sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==} + path-scurry@1.11.1: dependencies: - entities: 4.5.0 - dev: true - - /path-key@3.1.1: - resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} - engines: {node: '>=8'} - dev: true + lru-cache: 10.2.2 + minipass: 7.1.1 - /path-key@4.0.0: - resolution: {integrity: sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==} - engines: {node: '>=12'} - dev: true + perfect-debounce@1.0.0: {} - /path-type@5.0.0: - resolution: {integrity: sha512-5HviZNaZcfqP95rwpv+1HDgUamezbqdSYTyzjTvwtJSnIH+3vnbmWsItli8OFEndS984VT55M3jduxZbX351gg==} - engines: {node: '>=12'} - dev: true + picocolors@1.1.0: {} - /picocolors@1.0.0: - resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} - dev: true + picomatch@2.3.1: {} - /picomatch@2.3.1: - resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} - engines: {node: '>=8.6'} - dev: true + playwright-core@1.44.0: {} - /postcss-load-config@4.0.2(postcss@8.4.32): - resolution: {integrity: sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==} - engines: {node: '>= 14'} - peerDependencies: - postcss: '>=8.0.9' - ts-node: '>=9.0.0' - peerDependenciesMeta: - postcss: - optional: true - ts-node: - optional: true + playwright@1.44.0: dependencies: - lilconfig: 3.0.0 - postcss: 8.4.32 - yaml: 2.3.4 - dev: true - - /postcss-value-parser@4.2.0: - resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} - dev: true + playwright-core: 1.44.0 + optionalDependencies: + fsevents: 2.3.2 - /postcss@8.4.32: - resolution: {integrity: sha512-D/kj5JNu6oo2EIy+XL/26JEDTlIbB8hw85G8StOE6L74RQAVVP5rej6wxCNqyMbR4RkPfqvezVbPw81Ngd6Kcw==} - engines: {node: ^10 || ^12 || >=14} + postcss@8.4.47: dependencies: nanoid: 3.3.7 - picocolors: 1.0.0 - source-map-js: 1.0.2 - dev: true + picocolors: 1.1.0 + source-map-js: 1.2.1 - /preact@10.19.3: - resolution: {integrity: sha512-nHHTeFVBTHRGxJXKkKu5hT8C/YWBkPso4/Gad6xuj5dbptt9iF9NZr9pHbPhBrnT2klheu7mHTxTZ/LjwJiEiQ==} - dev: true + preact@10.22.0: {} - /prismjs@1.29.0: - resolution: {integrity: sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==} - engines: {node: '>=6'} - dev: true + property-information@6.5.0: {} - /queue-microtask@1.2.3: - resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} - dev: true + punycode.js@2.3.1: {} - /readable-stream@3.6.2: - resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} - engines: {node: '>= 6'} - dependencies: - inherits: 2.0.4 - string_decoder: 1.3.0 - util-deprecate: 1.0.2 - dev: true + queue-microtask@1.2.3: {} - /readdirp@3.6.0: - 
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} - engines: {node: '>=8.10.0'} + readdirp@3.6.0: dependencies: picomatch: 2.3.1 - dev: true + optional: true - /restore-cursor@4.0.0: - resolution: {integrity: sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dependencies: - onetime: 5.1.2 - signal-exit: 3.0.7 - dev: true + regex@4.3.2: {} - /reusify@1.0.4: - resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} - engines: {iojs: '>=1.0.0', node: '>=0.10.0'} - dev: true + reusify@1.0.4: {} - /rollup@4.8.0: - resolution: {integrity: sha512-NpsklK2fach5CdI+PScmlE5R4Ao/FSWtF7LkoIrHDxPACY/xshNasPsbpG0VVHxUTbf74tJbVT4PrP8JsJ6ZDA==} - engines: {node: '>=18.0.0', npm: '>=8.0.0'} - hasBin: true + rfdc@1.4.1: {} + + robust-predicates@3.0.2: {} + + rollup@4.22.4: + dependencies: + '@types/estree': 1.0.5 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.8.0 - '@rollup/rollup-android-arm64': 4.8.0 - '@rollup/rollup-darwin-arm64': 4.8.0 - '@rollup/rollup-darwin-x64': 4.8.0 - '@rollup/rollup-linux-arm-gnueabihf': 4.8.0 - '@rollup/rollup-linux-arm64-gnu': 4.8.0 - '@rollup/rollup-linux-arm64-musl': 4.8.0 - '@rollup/rollup-linux-riscv64-gnu': 4.8.0 - '@rollup/rollup-linux-x64-gnu': 4.8.0 - '@rollup/rollup-linux-x64-musl': 4.8.0 - '@rollup/rollup-win32-arm64-msvc': 4.8.0 - '@rollup/rollup-win32-ia32-msvc': 4.8.0 - '@rollup/rollup-win32-x64-msvc': 4.8.0 + '@rollup/rollup-android-arm-eabi': 4.22.4 + '@rollup/rollup-android-arm64': 4.22.4 + '@rollup/rollup-darwin-arm64': 4.22.4 + '@rollup/rollup-darwin-x64': 4.22.4 + '@rollup/rollup-linux-arm-gnueabihf': 4.22.4 + '@rollup/rollup-linux-arm-musleabihf': 4.22.4 + '@rollup/rollup-linux-arm64-gnu': 4.22.4 + '@rollup/rollup-linux-arm64-musl': 4.22.4 + '@rollup/rollup-linux-powerpc64le-gnu': 4.22.4 + '@rollup/rollup-linux-riscv64-gnu': 4.22.4 + '@rollup/rollup-linux-s390x-gnu': 4.22.4 + '@rollup/rollup-linux-x64-gnu': 4.22.4 + '@rollup/rollup-linux-x64-musl': 4.22.4 + '@rollup/rollup-win32-arm64-msvc': 4.22.4 + '@rollup/rollup-win32-ia32-msvc': 4.22.4 + '@rollup/rollup-win32-x64-msvc': 4.22.4 fsevents: 2.3.3 - dev: true - /run-parallel@1.2.0: - resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + run-con@1.3.2: + dependencies: + deep-extend: 0.6.0 + ini: 4.1.3 + minimist: 1.2.8 + strip-json-comments: 3.1.1 + + run-parallel@1.2.0: dependencies: queue-microtask: 1.2.3 - dev: true - /safe-buffer@5.2.1: - resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} - dev: true + rw@1.3.3: {} - /sass@1.69.5: - resolution: {integrity: sha512-qg2+UCJibLr2LCVOt3OlPhr/dqVHWOa9XtZf2OjbLs/T4VPSJ00udtgJxH3neXZm+QqX8B+3cU7RaLqp1iVfcQ==} - engines: {node: '>=14.0.0'} - hasBin: true + sade@1.8.1: dependencies: - chokidar: 3.5.3 - immutable: 4.3.4 - source-map-js: 1.0.2 - dev: true - - /sax@1.3.0: - resolution: {integrity: sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==} - dev: true + mri: 1.2.0 - /search-insights@2.13.0: - resolution: {integrity: sha512-Orrsjf9trHHxFRuo9/rzm0KIWmgzE8RMlZMzuhZOJ01Rnz3D0YBAe+V6473t6/H6c7irs6Lt48brULAiRWb3Vw==} - dev: true + safer-buffer@2.1.2: {} - /section-matter@1.0.0: - resolution: {integrity: 
sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==} - engines: {node: '>=4'} + sass@1.77.2: dependencies: - extend-shallow: 2.0.1 - kind-of: 6.0.3 - dev: true + chokidar: 3.6.0 + immutable: 4.3.7 + source-map-js: 1.2.1 + optional: true - /semver@7.5.4: - resolution: {integrity: sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==} - engines: {node: '>=10'} - hasBin: true - dependencies: - lru-cache: 6.0.0 - dev: true + sax@1.3.0: {} - /shebang-command@2.0.0: - resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} - engines: {node: '>=8'} + search-insights@2.13.0: {} + + shebang-command@2.0.0: dependencies: shebang-regex: 3.0.0 - dev: true - /shebang-regex@3.0.0: - resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} - engines: {node: '>=8'} - dev: true + shebang-regex@3.0.0: {} - /signal-exit@3.0.7: - resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} - dev: true + shiki@1.18.0: + dependencies: + '@shikijs/core': 1.18.0 + '@shikijs/engine-javascript': 1.18.0 + '@shikijs/engine-oniguruma': 1.18.0 + '@shikijs/types': 1.18.0 + '@shikijs/vscode-textmate': 9.2.2 + '@types/hast': 3.0.4 - /signal-exit@4.1.0: - resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} - engines: {node: '>=14'} - dev: true + signal-exit@4.1.0: {} - /sitemap@7.1.1: - resolution: {integrity: sha512-mK3aFtjz4VdJN0igpIJrinf3EO8U8mxOPsTBzSsy06UtjZQJ3YY3o3Xa7zSc5nMqcMrRwlChHZ18Kxg0caiPBg==} - engines: {node: '>=12.0.0', npm: '>=5.6.0'} - hasBin: true + sitemap-ts@1.8.0: + dependencies: + '@antfu/utils': 0.7.10 + fast-glob: 3.3.2 + sitemap: 8.0.0 + xml-formatter: 3.6.3 + + sitemap@8.0.0: dependencies: '@types/node': 17.0.45 '@types/sax': 1.2.7 arg: 5.0.2 sax: 1.3.0 - dev: true - /slash@5.1.0: - resolution: {integrity: sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==} - engines: {node: '>=14.16'} - dev: true + source-map-js@1.2.0: {} - /source-map-js@1.0.2: - resolution: {integrity: sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==} - engines: {node: '>=0.10.0'} - dev: true + source-map-js@1.2.1: {} - /sprintf-js@1.0.3: - resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} - dev: true + space-separated-tokens@2.0.2: {} - /stdin-discarder@0.1.0: - resolution: {integrity: sha512-xhV7w8S+bUwlPTb4bAOUQhv8/cSS5offJuX8GQGq32ONF0ZtDWKfkdomM3HMRA+LhX6um/FZ0COqlwsjD53LeQ==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + speakingurl@14.0.1: {} + + string-width@4.2.3: dependencies: - bl: 5.1.0 - dev: true + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 - /string-width@6.1.0: - resolution: {integrity: sha512-k01swCJAgQmuADB0YIc+7TuatfNvTBVOoaUWJjTB9R4VJzR5vNWzf5t42ESVZFPS8xTySF7CAdV4t/aaIm3UnQ==} - engines: {node: '>=16'} + string-width@5.1.2: dependencies: eastasianwidth: 0.2.0 - emoji-regex: 10.3.0 + emoji-regex: 9.2.2 strip-ansi: 7.1.0 - dev: true - /string_decoder@1.3.0: - resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + stringify-entities@4.0.4: dependencies: - safe-buffer: 5.2.1 - dev: true + 
character-entities-html4: 2.1.0 + character-entities-legacy: 3.0.0 - /strip-ansi@7.1.0: - resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} - engines: {node: '>=12'} + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-ansi@7.1.0: dependencies: ansi-regex: 6.0.1 - dev: true - /strip-bom-string@1.0.0: - resolution: {integrity: sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==} - engines: {node: '>=0.10.0'} - dev: true + strip-json-comments@3.1.1: {} - /strip-final-newline@3.0.0: - resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==} - engines: {node: '>=12'} - dev: true + stylis@4.3.4: {} - /striptags@3.2.0: - resolution: {integrity: sha512-g45ZOGzHDMe2bdYMdIvdAfCQkCTDMGBazSw1ypMowwGIee7ZQ5dU0rBJ8Jqgl+jAKIv4dbeE1jscZq9wid1Tkw==} - dev: true + superjson@2.2.1: + dependencies: + copy-anything: 3.0.5 - /to-fast-properties@2.0.0: - resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} - engines: {node: '>=4'} - dev: true + tabbable@6.2.0: {} - /to-regex-range@5.0.1: - resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} - engines: {node: '>=8.0'} + to-fast-properties@2.0.0: {} + + to-regex-range@5.0.1: dependencies: is-number: 7.0.0 - dev: true - /ts-debounce@4.0.0: - resolution: {integrity: sha512-+1iDGY6NmOGidq7i7xZGA4cm8DAa6fqdYcvO5Z6yBevH++Bdo9Qt/mN0TzHUgcCcKv1gmh9+W5dHqz8pMWbCbg==} - dev: true + toml@3.0.0: {} - /uc.micro@1.0.6: - resolution: {integrity: sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==} - dev: true + trim-lines@3.0.1: {} - /undici-types@5.26.5: - resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} - dev: true + ts-dedent@2.2.0: {} - /unicorn-magic@0.1.0: - resolution: {integrity: sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==} - engines: {node: '>=18'} - dev: true + uc.micro@2.1.0: {} - /universalify@2.0.1: - resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==} - engines: {node: '>= 10.0.0'} - dev: true + undici-types@5.26.5: {} - /upath@2.0.1: - resolution: {integrity: sha512-1uEe95xksV1O0CYKXo8vQvN1JEbtJp7lb7C5U9HMsIp6IVwntkH/oNUzyVNQSd4S1sYk2FpSSW44FqMc8qee5w==} - engines: {node: '>=4'} - dev: true + unist-util-is@6.0.0: + dependencies: + '@types/unist': 3.0.3 - /update-browserslist-db@1.0.13(browserslist@4.22.2): - resolution: {integrity: sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==} - hasBin: true - peerDependencies: - browserslist: '>= 4.21.0' + unist-util-position@5.0.0: dependencies: - browserslist: 4.22.2 - escalade: 3.1.1 - picocolors: 1.0.0 - dev: true + '@types/unist': 3.0.3 - /util-deprecate@1.0.2: - resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} - dev: true + unist-util-stringify-position@3.0.3: + dependencies: + '@types/unist': 2.0.11 - /vite@5.0.12: - resolution: {integrity: sha512-4hsnEkG3q0N4Tzf1+t6NdN9dg/L3BM+q8SWgbSPnJvrgH2kgdyzfVJwbR1ic69/4uMJJ/3dqDZZE5/WwqW8U1w==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - peerDependencies: - '@types/node': ^18.0.0 || >=20.0.0 
- less: '*' - lightningcss: ^1.21.0 - sass: '*' - stylus: '*' - sugarss: '*' - terser: ^5.4.0 - peerDependenciesMeta: - '@types/node': - optional: true - less: - optional: true - lightningcss: - optional: true - sass: - optional: true - stylus: - optional: true - sugarss: - optional: true - terser: - optional: true + unist-util-stringify-position@4.0.0: dependencies: - esbuild: 0.19.9 - postcss: 8.4.32 - rollup: 4.8.0 - optionalDependencies: - fsevents: 2.3.3 - dev: true + '@types/unist': 3.0.3 - /vue-demi@0.14.6(vue@3.3.11): - resolution: {integrity: sha512-8QA7wrYSHKaYgUxDA5ZC24w+eHm3sYCbp0EzcDwKqN3p6HqtTCGR/GVsPyZW92unff4UlcSh++lmqDWN3ZIq4w==} - engines: {node: '>=12'} - hasBin: true - requiresBuild: true - peerDependencies: - '@vue/composition-api': ^1.0.0-rc.1 - vue: ^3.0.0-0 || ^2.6.0 - peerDependenciesMeta: - '@vue/composition-api': - optional: true + unist-util-visit-parents@6.0.1: dependencies: - vue: 3.3.11 - dev: true + '@types/unist': 3.0.3 + unist-util-is: 6.0.0 - /vue-router@4.2.5(vue@3.3.11): - resolution: {integrity: sha512-DIUpKcyg4+PTQKfFPX88UWhlagBEBEfJ5A8XDXRJLUnZOvcpMF8o/dnL90vpVkGaPbjvXazV/rC1qBKrZlFugw==} - peerDependencies: - vue: ^3.2.0 + unist-util-visit@5.0.0: dependencies: - '@vue/devtools-api': 6.5.1 - vue: 3.3.11 - dev: true + '@types/unist': 3.0.3 + unist-util-is: 6.0.0 + unist-util-visit-parents: 6.0.1 - /vue@3.3.11: - resolution: {integrity: sha512-d4oBctG92CRO1cQfVBZp6WJAs0n8AK4Xf5fNjQCBeKCvMI1efGQ5E3Alt1slFJS9fZuPcFoiAiqFvQlv1X7t/w==} - peerDependencies: - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true + uuid@9.0.1: {} + + uvu@0.5.6: dependencies: - '@vue/compiler-dom': 3.3.11 - '@vue/compiler-sfc': 3.3.11 - '@vue/runtime-dom': 3.3.11 - '@vue/server-renderer': 3.3.11(vue@3.3.11) - '@vue/shared': 3.3.11 - dev: true + dequal: 2.0.3 + diff: 5.2.0 + kleur: 4.1.5 + sade: 1.8.1 - /vuepress-plugin-sitemap2@2.0.0-rc.4(vuepress@2.0.0-rc.0): - resolution: {integrity: sha512-zi57grbyAFL54HUZNmmAWELYgwPsqa8p63HkEBSpXiQEa3JbYumAXHPZp4sIBGlBxcF8X34GtddrVw9FDlCtZA==} - engines: {node: '>=18.16.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'} - peerDependencies: - vuepress: 2.0.0-rc.0 - vuepress-vite: 2.0.0-rc.0 - vuepress-webpack: 2.0.0-rc.0 - peerDependenciesMeta: - vuepress: - optional: true - vuepress-vite: - optional: true - vuepress-webpack: - optional: true + vfile-message@4.0.2: dependencies: - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - sitemap: 7.1.1 - vuepress: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11) - vuepress-shared: 2.0.0-rc.4(vuepress@2.0.0-rc.0) - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@types/unist': 3.0.3 + unist-util-stringify-position: 4.0.0 - /vuepress-shared@2.0.0-rc.4(vuepress@2.0.0-rc.0): - resolution: {integrity: sha512-YndYftQ9AUdWWESZHFZ7QjuUGXqgVayHzu3Qfar9GWr45NP2ZW7edKN4adU2/bOiokYG1Rfj47dgMUrRxEgqhg==} - engines: {node: '>=18.16.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'} - peerDependencies: - vuepress: 2.0.0-rc.0 - vuepress-vite: 2.0.0-rc.0 - vuepress-webpack: 2.0.0-rc.0 - peerDependenciesMeta: - vuepress: - optional: true - vuepress-vite: - optional: true - vuepress-webpack: - optional: true + vfile@6.0.3: dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - '@vueuse/core': 10.7.0(vue@3.3.11) - cheerio: 1.0.0-rc.12 - dayjs: 1.11.10 - execa: 8.0.1 - fflate: 0.8.1 - gray-matter: 4.0.3 - semver: 7.5.4 - striptags: 3.2.0 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - 
vuepress: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11) - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@types/unist': 3.0.3 + vfile-message: 4.0.2 - /vuepress-vite@2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11): - resolution: {integrity: sha512-+2XBejeiskPyr2raBeA2o4uDFDsjtadpUVmtio3qqFtQpOhidz/ORuiTLr2UfLtFn1ASIHP6Vy2YjQ0e/TeUVw==} - engines: {node: '>=18.16.0'} - hasBin: true - peerDependencies: - '@vuepress/client': 2.0.0-rc.0 - vue: ^3.3.4 - dependencies: - '@vuepress/bundler-vite': 2.0.0-rc.0 - '@vuepress/cli': 2.0.0-rc.0 - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/theme-default': 2.0.0-rc.0 - vue: 3.3.11 - transitivePeerDependencies: - - '@types/node' - - '@vue/composition-api' - - less - - lightningcss - - sass - - sass-loader - - stylus - - sugarss - - supports-color - - terser - - ts-node - - typescript - dev: true + vite@5.4.8(@types/node@20.12.12)(sass@1.77.2): + dependencies: + esbuild: 0.21.5 + postcss: 8.4.47 + rollup: 4.22.4 + optionalDependencies: + '@types/node': 20.12.12 + fsevents: 2.3.3 + sass: 1.77.2 - /vuepress@2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11): - resolution: {integrity: sha512-sydt/B7+pIw926G5PntYmptLkC5o2buXKh+WR1+P2KnsvkXU+UGnQrJJ0FBvu/4RNuY99tkUZd59nyPhEmRrCg==} - engines: {node: '>=18.16.0'} - hasBin: true + vitepress-plugin-mermaid@2.0.16(mermaid@10.9.3)(vitepress@1.3.4(@algolia/client-search@4.23.3)(@types/node@20.12.12)(postcss@8.4.47)(sass@1.77.2)(search-insights@2.13.0)): dependencies: - vuepress-vite: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11) + mermaid: 10.9.3 + vitepress: 1.3.4(@algolia/client-search@4.23.3)(@types/node@20.12.12)(postcss@8.4.47)(sass@1.77.2)(search-insights@2.13.0) + optionalDependencies: + '@mermaid-js/mermaid-mindmap': 9.3.0 + + vitepress@1.3.4(@algolia/client-search@4.23.3)(@types/node@20.12.12)(postcss@8.4.47)(sass@1.77.2)(search-insights@2.13.0): + dependencies: + '@docsearch/css': 3.6.1 + '@docsearch/js': 3.6.1(@algolia/client-search@4.23.3)(search-insights@2.13.0) + '@shikijs/core': 1.18.0 + '@shikijs/transformers': 1.18.0 + '@types/markdown-it': 14.1.2 + '@vitejs/plugin-vue': 5.1.4(vite@5.4.8(@types/node@20.12.12)(sass@1.77.2))(vue@3.5.8) + '@vue/devtools-api': 7.4.5 + '@vue/shared': 3.5.8 + '@vueuse/core': 11.1.0(vue@3.5.8) + '@vueuse/integrations': 11.1.0(focus-trap@7.5.4)(vue@3.5.8) + focus-trap: 7.5.4 + mark.js: 8.11.1 + minisearch: 7.1.0 + shiki: 1.18.0 + vite: 5.4.8(@types/node@20.12.12)(sass@1.77.2) + vue: 3.5.8 + optionalDependencies: + postcss: 8.4.47 transitivePeerDependencies: + - '@algolia/client-search' - '@types/node' + - '@types/react' - '@vue/composition-api' - - '@vuepress/client' + - async-validator + - axios + - change-case + - drauu + - fuse.js + - idb-keyval + - jwt-decode - less - lightningcss + - nprogress + - qrcode + - react + - react-dom - sass - - sass-loader + - sass-embedded + - search-insights + - sortablejs - stylus - sugarss - - supports-color - terser - - ts-node - typescript - - vue - dev: true + - universal-cookie - /which@2.0.2: - resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} - engines: {node: '>= 8'} - hasBin: true + vue-demi@0.14.10(vue@3.5.8): + dependencies: + vue: 3.5.8 + + vue-demi@0.14.7(vue@3.4.27): + dependencies: + vue: 3.4.27 + + vue@3.4.27: + dependencies: + '@vue/compiler-dom': 3.4.27 + '@vue/compiler-sfc': 3.4.27 + '@vue/runtime-dom': 3.4.27 + '@vue/server-renderer': 
3.4.27(vue@3.4.27) + '@vue/shared': 3.4.27 + + vue@3.5.8: + dependencies: + '@vue/compiler-dom': 3.5.8 + '@vue/compiler-sfc': 3.5.8 + '@vue/runtime-dom': 3.5.8 + '@vue/server-renderer': 3.5.8(vue@3.5.8) + '@vue/shared': 3.5.8 + + web-worker@1.3.0: {} + + which@2.0.2: dependencies: isexe: 2.0.0 - dev: true - /yallist@4.0.0: - resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} - dev: true + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrap-ansi@8.1.0: + dependencies: + ansi-styles: 6.2.1 + string-width: 5.1.2 + strip-ansi: 7.1.0 + + xml-formatter@3.6.3: + dependencies: + xml-parser-xo: 4.1.2 + + xml-parser-xo@4.1.2: {} - /yaml@2.3.4: - resolution: {integrity: sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==} - engines: {node: '>= 14'} - dev: true + zwitch@2.0.4: {} diff --git a/runatlantis.io/.vitepress/components/Banner.vue b/runatlantis.io/.vitepress/components/Banner.vue new file mode 100644 index 0000000000..f73c5b38d4 --- /dev/null +++ b/runatlantis.io/.vitepress/components/Banner.vue @@ -0,0 +1,89 @@ +<script setup lang="ts"> +import { useElementSize } from '@vueuse/core'; +import { ref, watchEffect } from 'vue'; +const el = ref<HTMLElement>(); +const { height } = useElementSize(el); +watchEffect(() => { + if (height.value) { + document.documentElement.style.setProperty( + '--vp-layout-top-height', + `${height.value + 16}px` + ); + } +}); +const dismiss = () => { + localStorage.setItem( + 'survey-banner', + (Date.now() + 8.64e7 * 1).toString() // current time + 1 day + ); + document.documentElement.classList.add('banner-dismissed'); +}; +</script> + +<template> + <!-- <div ref="el" class="banner"> + <div class="text"> + </div> + + <button type="button" @click="dismiss"> + <svg + xmlns="http://www.w3.org/2000/svg" + viewBox="0 0 20 20" + fill="currentColor" + > + <path + d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z" + /> + </svg> + </button> + </div> --> +</template> + +<style> +.banner-dismissed { + --vp-layout-top-height: 0px !important; +} +html { + --vp-layout-top-height: 88px; +} +@media (min-width: 375px) { + html { + --vp-layout-top-height: 64px; + } +} +@media (min-width: 768px) { + html { + --vp-layout-top-height: 40px; + } +} +</style> + +<style scoped> +.banner-dismissed .banner { + display: none; +} +.banner { + position: fixed; + top: 0; + right: 0; + left: 0; + z-index: var(--vp-z-index-layout-top); + padding: 8px; + text-align: center; + background: #383636; + color: #fff; + display: flex; + justify-content: space-between; +} +.text { + flex: 1; +} +a { + text-decoration: underline; +} +svg { + width: 20px; + height: 20px; + margin-left: 8px; +} +</style> diff --git a/runatlantis.io/.vitepress/components/shims.d.ts b/runatlantis.io/.vitepress/components/shims.d.ts new file mode 100644 index 0000000000..d1f3133128 --- /dev/null +++ b/runatlantis.io/.vitepress/components/shims.d.ts @@ -0,0 +1,5 @@ +declare module '*.vue' { + import type { DefineComponent } from 'vue'; + const component: DefineComponent; + export default component; +} diff --git a/runatlantis.io/.vitepress/config.ts b/runatlantis.io/.vitepress/config.ts new file mode 100644 index 0000000000..2a6ae1cf60 --- /dev/null +++ b/runatlantis.io/.vitepress/config.ts @@ -0,0 +1,133 @@ +import { generateSitemap as sitemap 
} from "sitemap-ts" +import footnote from 'markdown-it-footnote' +import { defineConfig } from 'vitepress'; +import * as navbars from "./navbars"; +import * as sidebars from "./sidebars"; +import { withMermaid } from "vitepress-plugin-mermaid"; + +// https://vitepress.dev/reference/site-config +const config = defineConfig({ + title: 'Atlantis', + description: 'Atlantis: Terraform Pull Request Automation', + lang: 'en-US', + lastUpdated: true, + locales: { + root: { + label: 'English', + lang: 'en-US', + themeConfig: { + nav: navbars.en, + sidebar: sidebars.en, + }, + }, + }, + themeConfig: { + // https://vitepress.dev/reference/default-theme-config + editLink: { + pattern: 'https://github.com/runatlantis/atlantis/edit/main/runatlantis.io/:path' + }, + // headline "depth" the right nav will show for its TOC + // + // https://vitepress.dev/reference/frontmatter-config#outline + outline: [2, 3], + search: { + provider: 'algolia', + options: { + // We internally discussed how this API key is exposed in the code and decided + // that it is a non-issue because this API key can easily be extracted by + // looking at the browser dev tools since the key is used in the API requests. + apiKey: '3b733dff1539ca3a210775860301fa86', + indexName: 'runatlantis', + appId: 'BH4D9OD16A', + locales: { + '/': { + placeholder: 'Search Documentation', + translations: { + button: { + buttonText: 'Search Documentation', + }, + }, + }, + }, + } + }, + socialLinks: [ + { icon: "slack", link: "https://join.slack.com/t/atlantis-community/shared_invite/zt-9xlxtxtc-CUSKB1ATt_sQy6um~LDPNw" }, + { icon: "twitter", link: "https://twitter.com/runatlantis" }, + { icon: "github", link: "https://github.com/runatlantis/atlantis" }, + ], + }, + // SEO Improvement - sitemap.xml & robots.txt + buildEnd: async ({ outDir }) => { + sitemap({ + hostname: "https://www.runatlantis.io/", + outDir: outDir, + generateRobotsTxt: true, + }) + }, + head: [ + ['link', { rel: 'icon', type: 'image/png', href: '/favicon-196x196.png', sizes: '196x196' }], + ['link', { rel: 'icon', type: 'image/png', href: '/favicon-96x96.png', sizes: '96x96' }], + ['link', { rel: 'icon', type: 'image/png', href: '/favicon-32x32.png', sizes: '32x32' }], + ['link', { rel: 'icon', type: 'image/png', href: '/favicon-16x16.png', sizes: '16x16' }], + ['link', { rel: 'icon', type: 'image/png', href: '/favicon-128.png', sizes: '128x128' }], + ['link', { rel: 'apple-touch-icon-precomposed', sizes: '57x57', href: '/apple-touch-icon-57x57.png' }], + ['link', { rel: 'apple-touch-icon-precomposed', sizes: '114x114', href: '/apple-touch-icon-114x114.png' }], + ['link', { rel: 'apple-touch-icon-precomposed', sizes: '72x72', href: '/apple-touch-icon-72x72.png' }], + ['link', { rel: 'apple-touch-icon-precomposed', sizes: '144x144', href: '/apple-touch-icon-144x144.png' }], + ['link', { rel: 'apple-touch-icon-precomposed', sizes: '60x60', href: '/apple-touch-icon-60x60.png' }], + ['link', { rel: 'apple-touch-icon-precomposed', sizes: '120x120', href: '/apple-touch-icon-120x120.png' }], + ['link', { rel: 'apple-touch-icon-precomposed', sizes: '76x76', href: '/apple-touch-icon-76x76.png' }], + ['link', { rel: 'apple-touch-icon-precomposed', sizes: '152x152', href: '/apple-touch-icon-152x152.png' }], + ['meta', { name: 'msapplication-TileColor', content: '#FFFFFF' }], + ['meta', { name: 'msapplication-TileImage', content: '/mstile-144x144.png' }], + ['meta', { name: 'msapplication-square70x70logo', content: '/mstile-70x70.png' }], + ['meta', { name: 'msapplication-square150x150logo', 
content: '/mstile-150x150.png' }], + ['meta', { name: 'msapplication-wide310x150logo', content: '/mstile-310x150.png' }], + ['meta', { name: 'msapplication-square310x310logo', content: '/mstile-310x310.png' }], + ['link', { rel: 'stylesheet', sizes: '152x152', href: 'https://fonts.googleapis.com/css?family=Lato:400,900' }], + ['meta', { name: 'google-site-verification', content: 'kTnsDBpHqtTNY8oscYxrQeeiNml2d2z-03Ct9wqeCeE' }], + // google analytics + [ + 'script', + { async: '', src: 'https://www.googletagmanager.com/gtag/js?id=UA-6850151-3' } + ], + [ + 'script', + {}, + `window.dataLayer = window.dataLayer || []; + function gtag(){dataLayer.push(arguments);} + gtag('js', new Date()); + + gtag('config', 'UA-6850151-3');` + ], + [ + 'script', + { id: 'restore-banner-preference' }, + ` + (() => { + const restore = (key, cls, def = false) => { + const saved = localStorage.getItem(key); + if (saved ? saved !== 'false' && new Date() < saved : def) { + document.documentElement.classList.add(cls); + } + }; + restore('survey-banner', 'banner-dismissed'); + })();`, + ] + ], + markdown: { + config: (md) => { + md.use(footnote) + } + }, + vite: { + server: { + fs: { + cachedChecks: false, + }, + } + } +}) + +export default withMermaid(config) diff --git a/runatlantis.io/.vitepress/navbars.ts b/runatlantis.io/.vitepress/navbars.ts new file mode 100644 index 0000000000..bedf951053 --- /dev/null +++ b/runatlantis.io/.vitepress/navbars.ts @@ -0,0 +1,9 @@ +const en = [ + { text: "Home", link: "/" }, + { text: "Guide", link: "/guide" }, + { text: "Docs", link: "/docs" }, + { text: "Contributing", link: "/contributing" }, + { text: "Blog", link: "/blog" }, +]; + +export { en }; diff --git a/runatlantis.io/.vitepress/sidebars.ts b/runatlantis.io/.vitepress/sidebars.ts new file mode 100644 index 0000000000..5bcabfc4bc --- /dev/null +++ b/runatlantis.io/.vitepress/sidebars.ts @@ -0,0 +1,165 @@ +const en = [ + { + text: "Guide", + link: "/guide", + collapsed: false, + items: [ + { text: "Test Drive", link: "/guide/test-drive" }, + { text: "Testing locally", link: "/guide/testing-locally" }, + ], + }, + { + text: "Docs", + link: "/docs", + collapsed: true, + items: [ + { + text: "Installing Atlantis", + collapsed: true, + items: [ + { text: "Installing Guide", link: "/docs/installation-guide" }, + { text: "Requirements", link: "/docs/requirements" }, + { text: "Git Host Access Credentials", link: "/docs/access-credentials" }, + { text: "Webhook Secrets", link: "/docs/webhook-secrets" }, + { text: "Deployment", link: "/docs/deployment" }, + { text: "Configuring Webhooks", link: "/docs/configuring-webhooks" }, + { text: "Provider Credentials", link: "/docs/provider-credentials" }, + ] + }, + { + text: "Configuring Atlantis", + collapsed: true, + items: [ + { text: "Overview", link: "/docs/configuring-atlantis" }, + { text: "Server Configuration", link: "/docs/server-configuration" }, + { text: "Server Side Repo Config", link: "/docs/server-side-repo-config" }, + { text: "Pre Workflow Hooks", link: "/docs/pre-workflow-hooks" }, + { text: "Post Workflow Hooks", link: "/docs/post-workflow-hooks" }, + { text: "Conftest Policy Checking", link: "/docs/policy-checking" }, + { text: "Custom Workflows", link: "/docs/custom-workflows" }, + { text: "Repo and Project Permissions", link: "/docs/repo-and-project-permissions" }, + { text: "Repo Level atlantis.yaml", link: "/docs/repo-level-atlantis-yaml" }, + { text: "Upgrading atlantis.yaml", link: "/docs/upgrading-atlantis-yaml" }, + { text: "Command Requirements", link: 
"/docs/command-requirements" }, + { text: "Checkout Strategy", link: "/docs/checkout-strategy" }, + { text: "Terraform Versions", link: "/docs/terraform-versions" }, + { text: "Terraform Cloud", link: "/docs/terraform-cloud" }, + { text: "Using Slack Hooks", link: "/docs/using-slack-hooks" }, + { text: "Stats", link: "/docs/stats" }, + { text: "FAQ", link: "/docs/faq" }, + ] + }, + { + text: "Using Atlantis", + collapsed: true, + items: [ + { text: "Overview", link: "/docs/using-atlantis" }, + { text: "API endpoints", link: "/docs/api-endpoints" }, + ] + }, + { + text: 'How Atlantis Works', + collapsed: true, + items: [ + { text: 'Overview', link: '/docs/how-atlantis-works', }, + { text: 'Locking', link: '/docs/locking', }, + { text: 'Autoplanning', link: '/docs/autoplanning', }, + { text: 'Automerging', link: '/docs/automerging', }, + { text: 'Security', link: '/docs/security', }, + ] + }, + { + text: 'Real-time Terraform Logs', + link: '/docs/streaming-logs', + }, + { + text: 'Troubleshooting', + collapsed: true, + items: [ + { text: 'HTTPS, SSL, TLS', 'link': '/docs/troubleshooting-https', }, + ] + }, + ], + }, + { + text: "Contributing", + link: "/contributing", + collapsed: false, + items: [ + { + text: 'Implementation Details', + items: [ + { text: "Events Controller", link: "/contributing/events-controller" }, + ] + }, + { text: "Glossary", link: "/contributing/glossary" }, + ] + + }, + { + text: "Blog", + link: "/blog", + collapsed: false, + items: [ + { + text: "2024", + collapsed: true, + items: [ + { + text: "Integrating Atlantis with OpenTofu", + link: "/blog/2024/integrating-atlantis-with-opentofu" + }, + { + text: "Atlantis User Survey Results", + link: "/blog/2024/april-2024-survey-results" + }, + ] + }, + { + text: "2019", + collapsed: true, + items: [ + { + text: "4 Reasons To Try HashiCorp's (New) Free Terraform Remote State Storage", + link: "/blog/2019/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage" + }, + ] + }, + { + text: "2018", + collapsed: true, + items: [ + { + text: "I'm Joining HashiCorp!", + link: "/blog/2018/joining-hashicorp" + }, + { + text: "Putting The Dev Into DevOps: Why Your Developers Should Write Terraform Too", + link: "/blog/2018/putting-the-dev-into-devops-why-your-developers-should-write-terraform-too" + }, + { + text: "Atlantis 0.4.4 Now Supports Bitbucket", + link: "/blog/2018/atlantis-0-4-4-now-supports-bitbucket" + }, + { + text: "Terraform And The Dangers Of Applying Locally", + link: "/blog/2018/terraform-and-the-dangers-of-applying-locally" + }, + { + text: "Hosting Our Static Site over SSL with S3, ACM, CloudFront and Terraform", + link: "/blog/2018/hosting-our-static-site-over-ssl-with-s3-acm-cloudfront-and-terraform" + }, + ] + }, + { + text: "2017", + collapsed: true, + items: [ + { text: "Introducing Atlantis", link: "/blog/2017/introducing-atlantis" }, + ] + }, + ] + } +] + +export { en } diff --git a/runatlantis.io/.vitepress/theme/index.ts b/runatlantis.io/.vitepress/theme/index.ts new file mode 100644 index 0000000000..395964ae4b --- /dev/null +++ b/runatlantis.io/.vitepress/theme/index.ts @@ -0,0 +1,11 @@ +import DefaultTheme from "vitepress/theme"; +import { defineAsyncComponent, h } from 'vue'; + +export default { + ...DefaultTheme, + Layout() { + return h(DefaultTheme.Layout, null, { + 'layout-top': () => h(defineAsyncComponent(() => import('../components/Banner.vue'))) + }); + } +}; diff --git a/runatlantis.io/.vuepress/config.js b/runatlantis.io/.vuepress/config.js deleted file mode 100644 index 
9658fc29a5..0000000000 --- a/runatlantis.io/.vuepress/config.js +++ /dev/null @@ -1,194 +0,0 @@ -import { googleAnalyticsPlugin } from '@vuepress/plugin-google-analytics' -import { docsearchPlugin } from '@vuepress/plugin-docsearch' -import { getDirname, path } from '@vuepress/utils' -import { defaultTheme, defineUserConfig } from 'vuepress' -import { sitemapPlugin } from 'vuepress-plugin-sitemap2'; - -const __dirname = getDirname(import.meta.url) - -export default defineUserConfig({ - alias: { - '@theme/Home.vue': path.resolve(__dirname, './theme/components/Home.vue'), - }, - locales: { - '/': { - lang: 'en-US', - title: 'Atlantis', - description: 'Atlantis: Terraform Pull Request Automation', - }, -/* - '/es/': { - lang: 'es-ES', - title: 'Atlantis', - description: 'Atlantis: AutomatizaciÃŗn de Pull Requests para Terraform', - }, -*/ - }, - plugins: [ - googleAnalyticsPlugin({ - id: 'UA-6850151-3', - }), - sitemapPlugin({ - hostname: 'https://runatlantis.io', - }), - docsearchPlugin({ - // We internally discussed how this API key is exposed in the code and decided - // that it is a non-issue because this API key can easily be extracted by - // looking at the browser dev tools since the key is used in the API requests. - apiKey: '3b733dff1539ca3a210775860301fa86', - indexName: 'runatlantis', - appId: 'BH4D9OD16A', - locales: { - '/': { - placeholder: 'Search Documentation', - translations: { - button: { - buttonText: 'Search Documentation', - }, - }, - }, - }, - }), - ], - head: [ - ['link', { rel: 'icon', type: 'image/png', href: '/favicon-196x196.png', sizes: '196x196' }], - ['link', { rel: 'icon', type: 'image/png', href: '/favicon-96x96.png', sizes: '96x96' }], - ['link', { rel: 'icon', type: 'image/png', href: '/favicon-32x32.png', sizes: '32x32' }], - ['link', { rel: 'icon', type: 'image/png', href: '/favicon-16x16.png', sizes: '16x16' }], - ['link', { rel: 'icon', type: 'image/png', href: '/favicon-128.png', sizes: '128x128' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '57x57', href: '/apple-touch-icon-57x57.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '114x114', href: '/apple-touch-icon-114x114.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '72x72', href: '/apple-touch-icon-72x72.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '144x144', href: '/apple-touch-icon-144x144.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '60x60', href: '/apple-touch-icon-60x60.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '120x120', href: '/apple-touch-icon-120x120.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '76x76', href: '/apple-touch-icon-76x76.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '152x152', href: '/apple-touch-icon-152x152.png' }], - ['meta', { name: 'msapplication-TileColor', content: '#FFFFFF' }], - ['meta', { name: 'msapplication-TileImage', content: '/mstile-144x144.png' }], - ['meta', { name: 'msapplication-square70x70logo', content: '/mstile-70x70.png' }], - ['meta', { name: 'msapplication-square150x150logo', content: '/mstile-150x150.png' }], - ['meta', { name: 'msapplication-wide310x150logo', content: '/mstile-310x150.png' }], - ['meta', { name: 'msapplication-square310x310logo', content: '/mstile-310x310.png' }], - ['link', { rel: 'stylesheet', sizes: '152x152', href: 'https://fonts.googleapis.com/css?family=Lato:400,900' }], - ['meta', { name: 'google-site-verification', content: 'kTnsDBpHqtTNY8oscYxrQeeiNml2d2z-03Ct9wqeCeE' 
}], - ], - themePlugins: { - activeHeaderLinks: false, - }, - theme: defaultTheme({ - docsBranch: "main", - logo: '/hero.png', - locales: { - '/': { - selectLanguageName: 'English', - navbar: [ - { text: 'Home', link: '/' }, - { text: 'Guide', link: '/guide/' }, - { text: 'Docs', link: '/docs/' }, - { text: 'Blog', link: 'https://medium.com/runatlantis' }, - ], - }, -/* - '/es/': { - selectLanguageName: 'Spanish', - navbar: [ - { text: 'Home', link: '/es/' }, - { text: 'Guide', link: '/es/guide/' }, - { text: 'Docs', link: '/es/docs/' }, - { text: 'Blog', link: 'https://medium.com/runatlantis' }, - ], - }, -*/ - }, - sidebar: { - '/guide/': [ - '', - 'test-drive', - 'testing-locally', - ], - '/docs/': [ - { - text: 'Installing Atlantis', - collapsible: true, - children: [ - 'installation-guide', - 'requirements', - 'access-credentials', - 'webhook-secrets', - 'deployment', - 'configuring-webhooks', - 'provider-credentials', - ] - }, - { - text: 'Configuring Atlantis', - collapsible: true, - children: [ - { - text: 'Overview', - link: 'configuring-atlantis', - }, - 'server-configuration', - 'server-side-repo-config', - 'pre-workflow-hooks', - 'post-workflow-hooks', - 'policy-checking', - 'custom-workflows', - 'repo-level-atlantis-yaml', - 'upgrading-atlantis-yaml', - 'command-requirements', - 'checkout-strategy', - 'terraform-versions', - 'terraform-cloud', - 'using-slack-hooks', - 'stats', - 'faq', - ] - }, - { - text: 'Using Atlantis', - collapsible: true, - children: [ - { - text: 'Overview', - link: 'using-atlantis', - }, - 'api-endpoints', - ] - }, - { - text: 'How Atlantis Works', - collapsible: true, - children: [ - { - text: 'Overview', - link: 'how-atlantis-works', - }, - 'locking', - 'autoplanning', - 'automerging', - 'security', - ] - }, - { - text: 'Real-time Terraform Logs', - collapsible: true, - children: [ - 'streaming-logs', - ] - }, - { - text: 'Troubleshooting', - collapsible: true, - children: [ - 'troubleshooting-https', - ] - } - ] - }, - repo: 'runatlantis/atlantis', - docsDir: 'runatlantis.io', - editLink: true, - }) -}) diff --git a/runatlantis.io/.vuepress/public/_redirects b/runatlantis.io/.vuepress/public/_redirects deleted file mode 100644 index a025dc528b..0000000000 --- a/runatlantis.io/.vuepress/public/_redirects +++ /dev/null @@ -1,2 +0,0 @@ -/guide/getting-started.html /guide/ -/docs/atlantis-yaml-reference.html /docs/repo-level-atlantis-yaml.html diff --git a/runatlantis.io/.vuepress/public/certificate.svg b/runatlantis.io/.vuepress/public/certificate.svg deleted file mode 100644 index 17df5278b1..0000000000 --- a/runatlantis.io/.vuepress/public/certificate.svg +++ /dev/null @@ -1,59 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<!-- Generator: Adobe Illustrator 18.0.0, SVG Export Plug-In . 
SVG Version: 6.00 Build 0) --> -<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"> -<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" - viewBox="0 0 64 64" enable-background="new 0 0 64 64" xml:space="preserve"> -<g> - <g> - <g> - <path fill="#3BCC9C" d="M32,0c17.7,0,32,14.3,32,32S49.7,64,32,64S0,49.7,0,32S14.3,0,32,0z"/> - </g> - </g> - <g> - <g> - <path fill="#FFFFFF" d="M13.3,44c-0.7,0-1.3-0.6-1.3-1.2V17.2c0-0.7,0.6-1.2,1.3-1.2h37.4c0.7,0,1.3,0.6,1.3,1.2v25.5 - c0,0.7-0.6,1.2-1.3,1.2H13.3z"/> - </g> - </g> - <g> - <g> - <path fill="#D3D5DD" d="M45,20c0.5,0,1,0.5,1,1s-0.5,1-1,1H18c-0.5,0-1-0.5-1-1s0.5-1,1-1H45z"/> - </g> - </g> - <g> - <g> - <path fill="#D3D5DD" d="M45,25c0.5,0,1,0.5,1,1s-0.5,1-1,1H18c-0.5,0-1-0.5-1-1s0.5-1,1-1H45z"/> - </g> - </g> - <g> - <g> - <polygon fill="#E84B4B" points="44,43 44,44 44,53 40.1,50.8 36,53 36,44 36,43 "/> - </g> - </g> - <g> - <g> - <path fill="#316CFF" d="M26,35c0.5,0,1,0.4,1,1c0,0.5-0.5,1-1,1h-8c-0.5,0-1-0.5-1-1c0-0.6,0.5-1,1-1H26z"/> - </g> - </g> - <g> - <g> - <path fill="#FFC533" d="M44,43.2c-0.1,0.1-0.1,0.1-0.2,0.2l-0.5,0.8c-0.4,0.6-1.2,0.9-1.9,0.6l-0.8-0.3c-0.4-0.1-0.8-0.1-1.1,0 - l-0.8,0.3c-0.7,0.3-1.5,0-1.9-0.6l-0.5-0.8c0-0.1-0.1-0.1-0.2-0.2c-0.2-0.2-0.4-0.4-0.7-0.4l-0.9-0.3c-0.7-0.2-1.2-0.9-1.1-1.6 - l0-1.1c0-0.4-0.1-0.7-0.3-1L32.3,38c-0.4-0.6-0.4-1.3,0-1.9l0.6-0.9c0.2-0.3,0.3-0.6,0.3-1l0-1.1c0-0.7,0.4-1.4,1.1-1.6l0.9-0.3 - c0.4-0.1,0.7-0.3,0.9-0.7l0.5-0.8c0.4-0.6,1.2-0.9,1.9-0.6l0.8,0.3c0.4,0.1,0.8,0.1,1.1,0l0.8-0.3c0.7-0.3,1.5,0,1.9,0.6l0.5,0.8 - c0.2,0.3,0.5,0.5,0.9,0.7l0.9,0.3c0.7,0.2,1.2,0.9,1.1,1.6l0,1.1c0,0.4,0.1,0.7,0.3,1l0.6,0.9c0.4,0.6,0.4,1.3,0,1.9L47,38.8 - c-0.2,0.3-0.3,0.6-0.3,1l0,1.1c0,0.7-0.4,1.4-1.1,1.6l-0.9,0.3C44.4,42.9,44.2,43,44,43.2z"/> - </g> - </g> - <g> - <g> - <path fill="#F2AC0F" d="M40,32c2.8,0,5,2.2,5,5s-2.2,5-5,5s-5-2.2-5-5S37.2,32,40,32z"/> - </g> - </g> - <g> - <g> - <path fill="#FCEA81" d="M40,34c1.7,0,3,1.3,3,3s-1.3,3-3,3s-3-1.3-3-3S38.3,34,40,34z"/> - </g> - </g> -</g> -</svg> diff --git a/runatlantis.io/.vuepress/public/checkmark.svg b/runatlantis.io/.vuepress/public/checkmark.svg deleted file mode 100644 index ccdc2f7404..0000000000 --- a/runatlantis.io/.vuepress/public/checkmark.svg +++ /dev/null @@ -1,12 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<!-- Generator: Adobe Illustrator 21.0.2, SVG Export Plug-In . SVG Version: 6.00 Build 0) --> -<svg version="1.1" id="Build-Stage-Passed" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" - y="0px" viewBox="0 0 15 15" style="enable-background:new 0 0 15 15;" xml:space="preserve"> -<style type="text/css"> - .st0{fill:none;stroke:#39AA56;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;} -</style> -<g id="stage-passed-icon"> - <circle class="st0" cx="7.5" cy="7.5" r="7"/> - <polyline class="st0" points="10.63,4.711 7.064,10.213 4.294,8.012 "/> -</g> -</svg> diff --git a/runatlantis.io/.vuepress/public/coding.svg b/runatlantis.io/.vuepress/public/coding.svg deleted file mode 100644 index 1f67eec776..0000000000 --- a/runatlantis.io/.vuepress/public/coding.svg +++ /dev/null @@ -1,42 +0,0 @@ -<?xml version="1.0" encoding="iso-8859-1"?> -<!-- Generator: Adobe Illustrator 19.0.0, SVG Export Plug-In . 
SVG Version: 6.00 Build 0) --> -<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" - viewBox="0 0 512 512" style="enable-background:new 0 0 512 512;" xml:space="preserve"> -<circle style="fill:#F33052;" cx="256" cy="256" r="256"/> -<g> - <path style="fill:#FFFFFF;" d="M213.12,319.776L99.872,270.544V243.28l113.248-49.008v32.112l-79.008,30.208l79.008,31.328V319.776 - z"/> - <path style="fill:#FFFFFF;" d="M223.6,341.408l40.912-170.832h23.776l-41.36,170.832H223.6z"/> - <path style="fill:#FFFFFF;" d="M298.768,319.904V288l79.104-31.104l-79.104-30.752V194.48l113.36,49.008v27.04L298.768,319.904z"/> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -</svg> diff --git a/runatlantis.io/.vuepress/public/list.svg b/runatlantis.io/.vuepress/public/list.svg deleted file mode 100644 index 8c5735e658..0000000000 --- a/runatlantis.io/.vuepress/public/list.svg +++ /dev/null @@ -1,49 +0,0 @@ -<?xml version="1.0" encoding="iso-8859-1"?> -<!-- Generator: Adobe Illustrator 19.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) --> -<svg version="1.1" id="Capa_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" - viewBox="0 0 58 58" style="enable-background:new 0 0 58 58;" xml:space="preserve"> -<polygon style="fill:#EDEADA;" points="51.5,14 37.5,0 6.5,0 6.5,58 51.5,58 "/> -<polygon style="fill:#CEC9AE;" points="37.5,0 37.5,14 51.5,14 "/> -<path style="fill:#CEC9AE;" d="M41.5,22h-16c-0.552,0-1-0.447-1-1s0.448-1,1-1h16c0.552,0,1,0.447,1,1S42.052,22,41.5,22z"/> -<path style="fill:#14A085;" d="M17.5,23c-0.257,0-0.514-0.099-0.708-0.293l-2-2c-0.391-0.391-0.391-1.023,0-1.414 - s1.023-0.391,1.414,0l1.367,1.367l4.301-3.441c0.43-0.345,1.061-0.275,1.405,0.156c0.345,0.432,0.275,1.061-0.156,1.406l-5,4 - C17.941,22.928,17.72,23,17.5,23z"/> -<path style="fill:#CEC9AE;" d="M41.5,33h-16c-0.552,0-1-0.447-1-1s0.448-1,1-1h16c0.552,0,1,0.447,1,1S42.052,33,41.5,33z"/> -<path style="fill:#14A085;" d="M17.5,34c-0.257,0-0.514-0.099-0.708-0.293l-2-2c-0.391-0.391-0.391-1.023,0-1.414 - s1.023-0.391,1.414,0l1.367,1.367l4.301-3.441c0.43-0.345,1.061-0.275,1.405,0.156c0.345,0.432,0.275,1.061-0.156,1.406l-5,4 - C17.941,33.928,17.72,34,17.5,34z"/> -<path style="fill:#CEC9AE;" d="M41.5,44h-16c-0.552,0-1-0.447-1-1s0.448-1,1-1h16c0.552,0,1,0.447,1,1S42.052,44,41.5,44z"/> -<path style="fill:#14A085;" d="M17.5,45c-0.257,0-0.514-0.099-0.708-0.293l-2-2c-0.391-0.391-0.391-1.023,0-1.414 - s1.023-0.391,1.414,0l1.367,1.367l4.301-3.441c0.43-0.345,1.061-0.275,1.405,0.156c0.345,0.432,0.275,1.061-0.156,1.406l-5,4 - C17.941,44.928,17.72,45,17.5,45z"/> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -<g> -</g> -</svg> diff --git a/runatlantis.io/.vuepress/public/mobile-workflow-min.png b/runatlantis.io/.vuepress/public/mobile-workflow-min.png deleted file mode 100644 index b8eea33cc2..0000000000 Binary files a/runatlantis.io/.vuepress/public/mobile-workflow-min.png and /dev/null differ diff --git a/runatlantis.io/.vuepress/public/powerful.svg b/runatlantis.io/.vuepress/public/powerful.svg deleted file mode 100644 index e179434b71..0000000000 --- a/runatlantis.io/.vuepress/public/powerful.svg +++ /dev/null @@ -1,18 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<!-- Generator: Adobe Illustrator 18.0.0, SVG Export 
Plug-In . SVG Version: 6.00 Build 0) --> -<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"> -<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" - viewBox="0 0 64 64" enable-background="new 0 0 64 64" xml:space="preserve"> -<g> - <g> - <g> - <path fill="#5E5C89" d="M32,0c17.7,0,32,14.3,32,32S49.7,64,32,64S0,49.7,0,32S14.3,0,32,0z"/> - </g> - </g> - <g> - <g> - <polygon fill="#FFD833" points="44,27 37,27 41,15 27,15 25,32 29.6,32 24,49 "/> - </g> - </g> -</g> -</svg> diff --git a/runatlantis.io/.vuepress/public/workflow-min.png b/runatlantis.io/.vuepress/public/workflow-min.png deleted file mode 100644 index 1c9e383c70..0000000000 Binary files a/runatlantis.io/.vuepress/public/workflow-min.png and /dev/null differ diff --git a/runatlantis.io/.vuepress/styles/index.scss b/runatlantis.io/.vuepress/styles/index.scss deleted file mode 100644 index 6daf19f232..0000000000 --- a/runatlantis.io/.vuepress/styles/index.scss +++ /dev/null @@ -1,243 +0,0 @@ -// https://v2.vuepress.vuejs.org/reference/default-theme/styles.html#style-file - -// colors -$textColor: var(--c-text); -$borderColor: var(--c-border); -$buttonTextColor: var(--c-badge-danger-text); -$buttonColor: var(--c-brand); -$buttonHoverColor: var(--c-brand-light); -$darkBackground: var(--c-bg-light); -$darkBackgroundBorder: var(--c-border-dark); - -// layout -$navbarHeight: 3.6rem; -$sidebarWidth: 20rem; -$contentWidth: 740px; - -// responsive breakpoints -$MQNarrow: 959px; -$MQMobile: 719px; -$MQMobileNarrow: 419px; - -$homeWidth: 960px; - -.container { - padding-top: 3.6rem; -} - -.home { - padding: 0 2rem; - max-width: $homeWidth; - margin: 0px auto 80px; - .hero { - text-align: center; - img { - max-height: 280px; - display: block; - margin: 3rem auto 1.5rem; - } - h1 { - font-size: 3rem; - } - h1, .description, .action { - margin: 1.8rem auto; - } - .description { - max-width: 35rem; - font-family: Lato, sans-serif; - font-size: 1.9rem; - line-height: 1.3; - } - .action { - display: inline; - } - .action-button { - display: inline-block; - font-size: 1.2rem; - color: $buttonTextColor; - cursor: pointer; - background-color: $buttonColor; - padding: 0.8rem 1.6rem; - border-radius: 4px; - transition: background-color .1s ease; - box-sizing: border-box; - margin: 0 10px; - &:hover { - background-color: $buttonHoverColor; - } - } - } - h2 { - border-bottom: none; - } - .features { - border-top: 1px solid $borderColor; - padding: 1.2rem 0; - margin-top: 0; - } - .footer { - padding: 2.5rem; - border-top: 1px solid $borderColor; - text-align: center; - } -} - -.getting-started-footer { - padding: 2.5rem 0; - margin: 0 auto; -} - -.workflow-container { - border-top: 2px solid $borderColor; -} - -.workflow { - text-align: center; - margin: 80px auto; - max-width: $homeWidth; - img { - width: 100%; - } - .mobile { - display: none; - } -} - -.benefits-container { - border-top: 1px solid $darkBackgroundBorder; - .benefit-container { - border-bottom: 1px solid $darkBackgroundBorder; - .title { - padding-top: 40px; - text-align: center; - } - &.-dark { - background-color: $darkBackground; - } - .benefit { - max-width: $homeWidth; - margin: 0 auto; - display: flex; - flex-flow: row wrap; - align-items: center; - .item { - flex-basis: 50%; - flex-grow: 1; - min-width: 250px; - .image { - padding: 40px; - text-align: center; - img { - max-height: 200px; - } - } - } - .description { - padding: 40px; - h2 { - border: none; - } - ul { 
- list-style-type: none; - padding-left: 0; - } - li { - display: flex; - align-items: center; - line-height: 25px; - margin-bottom: 20px; - } - .checkmark { - width: 20px; - margin-right: 10px; - vertical-align: middle; - align-self: baseline; - padding-top: 5px; - } - } - } - } -} - -@media (max-width: $MQMobile) { - .workflow { - .mobile { - display: block; - } - .desktop { - display: none; - } - } - - .benefits-container { - .benefit-container { - .benefit { - flex-direction: column; - .item { - &.image { - order: -1; - } - } - } - } - } -} - -@media (max-width: $MQMobileNarrow) { - .home { - padding-left: 1.5rem; - padding-right: 1.5rem; - .hero { - img { - max-height: 210px; - margin: 2rem auto 1.2rem; - } - h1 { - font-size: 2rem; - } - h1, .description, .action { - margin: 1.2rem auto; - } - .description { - font-size: 1.2rem; - } - .action-button { - font-size: 1rem; - padding: 0.6rem 1.2rem; - } - } - } -} - -.theme-container { - &.home-custom { - .hero { - h1 { - font-size: 64px; - font-family: Lato, sans-serif; - font-weight: 900; - } - img { - height: 200px; - } - } - p { - &.description { - position: relative; - &:before { - position: absolute; - content: ''; - width: 40px; - height: 3px; - top: -19px; - left: 50%; - margin-left: -20px; - background: #f36; - } - } - } - } -} -.sidebar-heading { - font-size: inherit; -} diff --git a/runatlantis.io/.vuepress/styles/palette.scss b/runatlantis.io/.vuepress/styles/palette.scss deleted file mode 100644 index 7f406d4555..0000000000 --- a/runatlantis.io/.vuepress/styles/palette.scss +++ /dev/null @@ -1,4 +0,0 @@ -$accentColor: #0074db; -$textColor: #2c3e50; -$borderColor: #eaecef; -$codeBgColor: #282c34; diff --git a/runatlantis.io/.vuepress/theme/components/Home.vue b/runatlantis.io/.vuepress/theme/components/Home.vue deleted file mode 100644 index 271a574c85..0000000000 --- a/runatlantis.io/.vuepress/theme/components/Home.vue +++ /dev/null @@ -1,175 +0,0 @@ -<template> - <div class="container"> - <div class="home"> - <div class="hero"> - <img v-if="data.heroImage" :src="$withBase(data.heroImage)" alt="hero"> - <h1>{{ data.heroText || $title || 'Hello' }}</h1> - <p class="description"> - Terraform Pull Request Automation - </p> - <p class="action" v-if="data.actionText && data.actionLink"> - <a href="/guide/" class="nav-link action-button">Get Started →</a> - </p> - </div> - </div> - <div class="workflow-container"> - <div class="workflow"> - <h1>The Atlantis Workflow</h1> - <img src="/mobile-workflow-min.png" class="mobile" alt="Atlantis Workflow"> - <img src="/workflow-min.png" class="desktop" alt="Atlantis Workflow"> - </div> - </div> - <div class="benefits-container"> - <div class="benefit-container -dark"> - <div class="title"> - <h1>Benefits</h1> - </div> - <div class="benefit"> - <div class="item"> - <div class="description"> - <h2>Fewer Mistakes</h2> - <p>Bring the benefits of <strong>code review</strong> to your operations - workflow.</p> - <ul> - <li><img class="checkmark" src="/checkmark.svg">Catch errors in - the Terraform plan output before it's applied. - </li> - <li><img class="checkmark" src="/checkmark.svg">Ensure that you - apply changes before merging to main. 
- </li> - </ul> - </div> - </div> - <div class="item image"> - <div class="image"> - <img src="/list.svg"> - </div> - </div> - </div> - </div> - <div class="benefit-container"> - <div class="benefit"> - <div class="item image"> - <div class="image"> - <img src="/coding.svg"> - </div> - </div> - <div class="item"> - <div class="description"> - <h2>Put the <span style="text-decoration: underline">Dev</span> back into DevOps</h2> - <p>Empower your developers to write Terraform. <strong>Safely.</strong></p> - <ul> - <li><img class="checkmark" src="/checkmark.svg">Developers can - submit Terraform pull requests without needing credentials. - </li> - <li><img class="checkmark" src="/checkmark.svg">Operators can - require approvals prior to allowing an apply. - </li> - </ul> - </div> - </div> - </div> - </div> - <div class="benefit-container -dark"> - <div class="benefit"> - <div class="item"> - <div class="description"> - <h2>Instant Audit Logs And Compliance</h2> - <p>Pass audits without compromising your workflow.</p> - <ul> - <li><img class="checkmark" src="/checkmark.svg">Each pull request now holds a detailed log of what infrastructure changes were made and when; along with who made the change and who approved it. - </li> - <li><img class="checkmark" src="/checkmark.svg">Atlantis can be configured to require approvals on every production change. - </li> - </ul> - </div> - </div> - <div class="item image"> - <div class="image"> - <img src="/certificate.svg"> - </div> - </div> - </div> - </div> - </div> - <div class="benefits-container"> - <div class="benefit-container"> - <h1 class="title">Proven at Scale</h1> - <div class="benefit"> - <div class="item image"> - <div class="image"> - <img src="/powerful.svg"> - </div> - </div> - <div class="item"> - <div class="description"> - <ul> - <li><img class="checkmark" src="/checkmark.svg">Used by one of the world's top companies to manage over 600 Terraform repos with 300 developers. - </li> - <li><img class="checkmark" src="/checkmark.svg">In production use since 2017. - </li> - </ul> - </div> - </div> - </div> - </div> - </div> - <div class="benefits-container"> - <div class="benefit-container"> - <h1 class="title">How It Works</h1> - <div class="benefit"> - <div class="item"> - <div class="description"> - <ul> - <li><img class="checkmark" src="/checkmark.svg">Atlantis is - self-hosted. Your credentials don't leave your infrastructure. - </li> - <li><img class="checkmark" src="/checkmark.svg">Runs as a Golang - binary or Docker image and can be deployed on VMs, Kubernetes, - Fargate, etc. - </li> - <li><img class="checkmark" src="/checkmark.svg">Listens for - webhooks from GitHub/GitLab/Bitbucket/Azure DevOps. - </li> - <li><img class="checkmark" src="/checkmark.svg">Runs terraform - commands remotely and comments back with their output. 
- </li> - </ul> - </div> - </div> - <div class="item image"> - <div class="image"> - <img src="/hero.png"> - </div> - </div> - </div> - </div> - </div> - <div class="home getting-started-footer"> - <div class="hero"> - <p class="action" v-if="data.actionText && data.actionLink"> - <a href="/guide/" class="nav-link action-button">Get Started →</a> - </p> - </div> - </div> - <div class="footer" v-if="data.footer"> - {{ data.footer }} - </div> - </div> -</template> - -<script> - export default { - computed: { - data() { - return this.$page.frontmatter - }, - actionLink() { - return { - link: this.data.actionLink, - text: this.data.actionText - } - } - } - } -</script> diff --git a/runatlantis.io/.vuepress/theme/index.js b/runatlantis.io/.vuepress/theme/index.js deleted file mode 100644 index 85ad504429..0000000000 --- a/runatlantis.io/.vuepress/theme/index.js +++ /dev/null @@ -1,6 +0,0 @@ -// introduce custom home with navbar -// https://stackoverflow.com/a/60220684 -// https://vuepress.vuejs.org/theme/inheritance.html#usage -module.exports = { - extend: '@vuepress/theme-default' -} diff --git a/runatlantis.io/README.md b/runatlantis.io/README.md deleted file mode 100644 index 5772c9faf7..0000000000 --- a/runatlantis.io/README.md +++ /dev/null @@ -1,9 +0,0 @@ ---- -home: true -pageClass: home-custom -heroImage: /hero.png -heroText: Atlantis -actionText: Get Started → -actionLink: /guide/ -title: Terraform Pull Request Automation ---- diff --git a/runatlantis.io/blog.md b/runatlantis.io/blog.md new file mode 100644 index 0000000000..6ad783293c --- /dev/null +++ b/runatlantis.io/blog.md @@ -0,0 +1,28 @@ +--- +title: Welcome to Our Blog +aside: false +--- + +# Welcome to Our Blog + +We are thrilled to have you here! Our blog is a collection of insightful articles, tips, and updates from our team. Whether you're new or have been following us for a while, there's always something new to learn and explore. + +### Explore Our Popular Posts + +We have a rich history of blog posts dating back to 2017-2019. Here are some of our popular posts: + +- [4 Reasons To Try HashiCorp's (New) Free Terraform Remote State Storage](/blog/2019/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage) +- [I'm Joining HashiCorp!](/blog/2018/joining-hashicorp) +- [Putting The Dev Into DevOps: Why Your Developers Should Write Terraform Too](/blog/2018/putting-the-dev-into-devops-why-your-developers-should-write-terraform-too) +- [Atlantis 0.4.4 Now Supports Bitbucket](/blog/2018/atlantis-0-4-4-now-supports-bitbucket) +- [Terraform And The Dangers Of Applying Locally](/blog/2018/terraform-and-the-dangers-of-applying-locally) +- [Hosting Our Static Site over SSL with S3, ACM, CloudFront and Terraform](/blog/2018/hosting-our-static-site-over-ssl-with-s3-acm-cloudfront-and-terraform) +- [Introducing Atlantis](/blog/2017/introducing-atlantis) + +### Welcoming New Blog Authors + +We are excited to welcome new authors to our blog. Our diverse team brings a wealth of knowledge and experience to share with our readers. Stay tuned for fresh perspectives and in-depth articles on the latest trends and technologies. + +If you have any questions or topics you would like us to cover, feel free to reach out [on Slack](https://join.slack.com/t/atlantis-community/shared_invite/zt-9xlxtxtc-CUSKB1ATt_sQy6um~LDPNw). We are always looking to engage with our community and provide valuable content. + +Happy reading! 
diff --git a/runatlantis.io/blog/2017/introducing-atlantis.md b/runatlantis.io/blog/2017/introducing-atlantis.md new file mode 100644 index 0000000000..2957d994e7 --- /dev/null +++ b/runatlantis.io/blog/2017/introducing-atlantis.md @@ -0,0 +1,113 @@ +--- +title: Introducing Atlantis +lang: en-US +--- + +# Introducing Atlantis + +::: info +This post was originally written on September 11th, 2017 + +Original post: <https://medium.com/runatlantis/introducing-atlantis-6570d6de7281> +::: + +We're very excited to announce the open source release of Atlantis! Atlantis is a tool for +collaborating on Terraform that's been in use at Hootsuite for over a year. The core +functionality of Atlantis enables developers and operators to run `terraform plan` and +`apply` directly from Terraform pull requests. Atlantis then comments back on the pull +request with the output of the commands: + +![](/blog/intro/intro1.gif) + +This is a simple feature; however, it has had a massive effect on how our team writes Terraform. +By bringing a Terraform workflow to pull requests, Atlantis helped our Ops team collaborate +better on Terraform and also enabled our entire development team to write and execute Terraform safely. + +Atlantis was built to solve two problems that arose at Hootsuite as we adopted Terraform: + +### 1. Effective Collaboration + +What's the best way to collaborate on Terraform in a team setting? + +### 2. Developers Writing Terraform + +How can we enable our developers to write and apply Terraform safely? + +## Effective Collaboration + +When writing Terraform, there are a number of workflows you can follow. The simplest workflow is just using `master`: + +![](/blog/intro/intro2.webp) + +In this workflow, you work on `master` and run `terraform` locally. +The problem with this workflow is that there is no collaboration or code review. +So we start to use pull requests: + +![](/blog/intro/intro3.webp) + +We still run `terraform plan` locally, but once we're satisfied with the changes, we create a pull request for review. When the pull request is approved, we run `apply` locally. + +This workflow is an improvement, but there are still problems. The first problem is that it's hard to review just the diff on the pull request. To properly review a change, you really need to see the output from `terraform plan`. + +![](/blog/intro/intro4.webp) + +What looks like a small change... + +![](/blog/intro/intro5.webp) + +...can have a big plan + +The second problem is that now it's easy for `master` to get out of sync with what's actually been applied. This can happen if you merge a pull request without running `apply`, or if the `apply` has an error halfway through, you forget to fix it, and then you merge to `master`. Now what's in `master` isn't actually what's running on production. At best, this causes confusion the next time someone runs `terraform plan`. At worst, it causes an outage when someone assumes that what's in `master` is actually running, and depends on it. + +With the Atlantis workflow, these problems are solved: + +![](/blog/intro/intro6.webp) + +Now it's easy to review changes because you see the `terraform plan` output on the pull request. + +![](/blog/intro/intro7.webp) + +Pull requests are easy to review since you can see the plan + +It's also easy to ensure that the pull request is `terraform apply`'d before merging to `master` because you can see the actual `apply` output on the pull request.
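+In comment form, the whole workflow is just two commands (a quick sketch, assuming the default Atlantis comment commands): + +```shell +# Commented on the pull request, not run locally: +atlantis plan    # Atlantis runs terraform plan and posts the output +atlantis apply   # after approval, Atlantis runs terraform apply and posts the output +```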
+
+![](/blog/intro/intro8.webp)
+
+So, Atlantis makes working on Terraform within an operations team much easier, but how does it help with getting your whole team to write Terraform?
+
+## Developers Writing Terraform
+
+Terraform usually starts out being used by the Ops team. As a result of using Terraform, the Ops team becomes much faster at making infrastructure changes, but the way developers request those changes remains the same: they use a ticketing system or chat to ask operations for help, the request goes into a queue, and later Ops responds that the task is complete.
+
+Soon, however, the Ops team starts to realize that it's possible for developers to make some of these Terraform changes themselves! There are some problems that arise though:
+
+- Developers don't have the credentials to actually run Terraform commands
+- If you give them credentials, it's hard to review what is actually being applied
+
+With Atlantis, these problems are solved. All `terraform plan` and `apply` commands are run from the pull request. This means developers don't need to have any credentials to run Terraform locally. Of course, this can be dangerous: how can you ensure developers (who might be new to Terraform) aren't applying things they shouldn't? The answer is code reviews and approvals.
+
+Since Atlantis comments back with the `plan` output directly on the pull request, it's easy for an operations engineer to review exactly what changes will be applied. And Atlantis can run in `require-approval` mode, which requires a GitHub pull request approval before allowing `apply` to be run:
+
+![](/blog/intro/intro9.webp)
+
+With Atlantis, developers are able to write and apply Terraform safely. They submit pull requests, run `atlantis plan` until their change looks good, and then get approval from Ops to `apply`.
+
+Since the introduction of Atlantis at Hootsuite, we've had **78** contributors to our Terraform repositories, **58** of whom are developers (**75%**).
+
+## Where we are now
+
+Since the introduction of Atlantis at Hootsuite, we've grown to 144 Terraform repositories [^1] that manage thousands of Amazon resources. Atlantis is used for every single Terraform change throughout our organization.
+
+## Getting started with Atlantis
+
+If you'd like to try out Atlantis for your team, you can download the latest release from <https://github.com/runatlantis/atlantis/releases>. If you run `atlantis testdrive` you can get started in less than 5 minutes. To read more about Atlantis go to <https://www.runatlantis.io/>.
+
+Check out our video for more information:
+
+<iframe src="https://cdn.embedly.com/widgets/media.html?src=https%3A%2F%2Fwww.youtube.com%2Fembed%2FTmIPWda0IKg%3Ffeature%3Doembed&url=http%3A%2F%2Fwww.youtube.com%2Fwatch%3Fv%3DTmIPWda0IKg&image=https%3A%2F%2Fi.ytimg.com%2Fvi%2FTmIPWda0IKg%2Fhqdefault.jpg&key=a19fcc184b9711e1b4764040d3dc5c07&type=text%2Fhtml&schema=youtube" allowfullscreen="" frameborder="0" height="480" width="640" title="Atlantis Walkthrough" class="fr n gh dv bg" scrolling="no"></iframe>
+
+[^1]: We split our Terraform up into multiple states, each with its own repository (see [1], [2], [3]).
+
+[1]: https://blog.gruntwork.io/how-to-manage-terraform-state-28f5697e68fa
+[2]: https://charity.wtf/2016/03/30/terraform-vpc-and-why-you-want-a-tfstate-file-per-env/
+[3]: https://www.nclouds.com/blog/terraform-multi-state-management/
diff --git a/runatlantis.io/blog/2018/atlantis-0-4-4-now-supports-bitbucket.md b/runatlantis.io/blog/2018/atlantis-0-4-4-now-supports-bitbucket.md
new file mode 100644
index 0000000000..ce6e39f0cb
--- /dev/null
+++ b/runatlantis.io/blog/2018/atlantis-0-4-4-now-supports-bitbucket.md
@@ -0,0 +1,102 @@
+---
+title: Atlantis 0.4.4 Now Supports Bitbucket
+lang: en-US
+---
+
+# Atlantis 0.4.4 Now Supports Bitbucket
+
+::: info
+This post was originally written on July 25th, 2018
+
+Original post: <https://medium.com/runatlantis/atlantis-0-4-4-now-supports-bitbucket-86c53a550b45>
+:::
+
+![](/blog/atlantis-0-4-4-now-supports-bitbucket/pic1.webp)
+
+Atlantis is an [open source](https://github.com/runatlantis/atlantis) platform for using Terraform in teams. I'm happy to announce that the [latest release](https://github.com/runatlantis/atlantis/releases) of Atlantis (0.4.4) now supports both Bitbucket Cloud (bitbucket.org) **and** Bitbucket Server (aka Stash).
+
+![](/blog/atlantis-0-4-4-now-supports-bitbucket/pic2.gif)
+
+Atlantis now supports the three major Git hosts: GitHub, GitLab and Bitbucket. The rest of this post will talk about how to use Atlantis with Bitbucket.
+
+## What is Atlantis?
+
+Atlantis is a self-hosted application that listens for Terraform pull request events via webhooks. It runs `terraform plan` and `apply` remotely and comments back on the pull request with the output.
+
+With Atlantis, you collaborate on the Terraform pull request itself instead of running `terraform apply` from your own computers, which can be dangerous.
+
+Check out <https://www.runatlantis.io> for more information.
+
+## Getting Started
+
+The easiest way to try out Atlantis with Bitbucket is to run Atlantis locally on your own computer. Eventually you'll want to deploy it as a standalone app, but this is the easiest way to try it out. Follow [these instructions](https://www.runatlantis.io/guide/getting-started.html) to get Atlantis running locally.
+
+### Create a Pull Request
+
+If you've got the Atlantis webhook configured for your repository and Atlantis is running, it's time to create a new pull request. I recommend adding a `null_resource` to one of your Terraform files for the test pull request. It won't actually create anything so it's safe to use as a test.
+
+Using the web editor, open up one of your Terraform files and add:
+
+```tf
+resource "null_resource" "example" {}
+```
+
+![](/blog/atlantis-0-4-4-now-supports-bitbucket/pic3.webp)
+
+Click Commit and select **Create a pull request for this change**.
+
+![](/blog/atlantis-0-4-4-now-supports-bitbucket/pic4.webp)
+
+Wait a few seconds and then refresh. Atlantis should have automatically run `terraform plan` and commented back on the pull request:
+
+![](/blog/atlantis-0-4-4-now-supports-bitbucket/pic5.webp)
+
+Now it's easier for your colleagues to review the pull request because they can see the `terraform plan` output.
+
+### Terraform Apply
+
+Since all we're doing is adding a null resource, I think it's safe to run `terraform apply`. To do so, I add a comment to the pull request: `atlantis apply`:
+
+![](/blog/atlantis-0-4-4-now-supports-bitbucket/pic6.webp)
+
+Atlantis is listening for pull request comments and will run `terraform apply` remotely and comment back with the output:
+
+![](/blog/atlantis-0-4-4-now-supports-bitbucket/pic7.webp)
+
+### Pull Request Approvals
+
+If you don't want anyone to be able to `terraform apply`, you can run Atlantis with `--require-approval` or add that setting to your [atlantis.yaml file](https://www.runatlantis.io/docs/command-requirements.html#approved).
+
+This will ensure that the pull request has been approved before someone can run `apply`.
+
+## Other Features
+
+### Customizable Commands
+
+Apart from being able to `plan` and `apply` from the pull request, Atlantis also enables you to customize the exact commands that are run via an `atlantis.yaml` config file. For example, to use the `-var-file` flag:
+
+```yaml{14}
+# atlantis.yaml
+version: 2
+projects:
+- name: staging
+  dir: "."
+  workflow: staging
+
+workflows:
+  staging:
+    plan:
+      steps:
+      - init
+      - plan:
+          extra_args: ["-var-file", "staging.tfvars"]
+```
+
+### Locking For Coordination
+
+![](/blog/atlantis-0-4-4-now-supports-bitbucket/pic8.webp)
+
+Atlantis will prevent other pull requests from running against the same directory as an open pull request so that each plan is applied atomically. Once the first pull request is merged, other pull requests are unlocked.
+
+## Next Steps
+
+If you're interested in using Atlantis with Bitbucket, check out our Getting Started docs. Happy Terraforming!
diff --git a/runatlantis.io/blog/2018/hosting-our-static-site-over-ssl-with-s3-acm-cloudfront-and-terraform.md b/runatlantis.io/blog/2018/hosting-our-static-site-over-ssl-with-s3-acm-cloudfront-and-terraform.md
new file mode 100644
index 0000000000..a9506073f3
--- /dev/null
+++ b/runatlantis.io/blog/2018/hosting-our-static-site-over-ssl-with-s3-acm-cloudfront-and-terraform.md
@@ -0,0 +1,174 @@
+---
+title: Hosting Our Static Site over SSL with S3, ACM, CloudFront and Terraform
+lang: en-US
+---
+
+# Hosting Our Static Site over SSL with S3, ACM, CloudFront and Terraform
+
+::: info
+This post was originally written on March 4, 2018
+
+Original post: <https://medium.com/runatlantis/hosting-our-static-site-over-ssl-with-s3-acm-cloudfront-and-terraform-513b799aec0f>
+:::
+
+In this post I cover how I hosted <https://www.runatlantis.io> using
+
+- S3 — for storing the static site
+- CloudFront — for serving the static site over SSL
+- AWS Certificate Manager — for generating the SSL certificates
+- Route53 — for routing the domain name www.runatlantis.io to the correct location
+
+I chose Terraform in this case because Atlantis is a tool for automating and collaborating on Terraform in a team (see github.com/runatlantis/atlantis)–and so obviously it made sense to host our homepage using Terraform–but also because it's now much easier to manage. I don't have to go into the AWS console and click around to find what settings I want to change. Instead I can just look at ~100 lines of code, make a change, and run `terraform apply`.
+
+::: info
+NOTE: 4 months after this writing, I moved the site to [Netlify](https://www.netlify.com/) because it automatically builds from my master branch on any change, updates faster since I don't need to wait for the CloudFront cache to expire, and gives me [deploy previews](https://www.netlify.com/blog/2016/07/20/introducing-deploy-previews-in-netlify/) of changes. The DNS records are still hosted on AWS.
+:::
+
+# Overview
+
+There's a surprising number of components required to get all this working, so I'm going to start with an overview of what they're all needed for. Here's what the final architecture looks like:
+
+![](/blog/hosting-our-static-site/pic1.webp)
+
+That's what the final product looks like, but let's start with the steps required to get there.
+
+## Step 1 — Generate The Site
+
+The first step is to have a site generated. Our site uses [Hugo](https://gohugo.io/), a Golang site generator. Once it's set up, you just need to run `hugo` and it will generate a directory with HTML and all your content ready to host.
+
+## Step 2 — Host The Content
+
+Once you've got a website, you need it to be accessible on the internet. I used S3 for this because it's dirt cheap and it integrates well with all the other necessary components. I simply upload my website folder to the S3 bucket.
+
+## Step 3 — Generate an SSL Certificate
+
+I needed to generate an SSL certificate for <https://www.runatlantis.io>. I used the AWS Certificate Manager for this because it's free and is easily integrated with the rest of the system.
+
+## Step 4 — Set up DNS
+
+Because I'm going to host the site on AWS services, I need requests to www.runatlantis.io to be routed to those services. Route53 is the obvious solution.
+
+## Step 5 — Host with CloudFront
+
+At this point, we've generated an SSL certificate for www.runatlantis.io and our website is available on the internet via its S3 URL, so can't we just CNAME to the S3 bucket and call it a day? Unfortunately not.
+
+Since we generated our own certificate, we would need S3 to sign its responses using our certificate. S3 doesn't support this and thus we need CloudFront. CloudFront supports using our own SSL cert and will just pull its data from the S3 bucket.
+
+# Terraform Time
+
+Now that we know what our architecture should look like, it's simply a matter of writing the Terraform.
+
+## Initial Setup
+
+Create a new file `main.tf`:
+
+<<< @/public/blog/hosting-our-static-site/code/main.tf
+
+## S3 Bucket
+
+Assuming we've generated our site content already, we need to create an S3 bucket to host the content:
+
+<<< @/public/blog/hosting-our-static-site/code/s3-bucket.tf
+
+We should be able to run Terraform now to create the S3 bucket:
+
+```sh
+terraform init
+terraform apply
+```
+
+![](/blog/hosting-our-static-site/pic2.webp)
+
+Now we want to upload our content to the S3 bucket:
+
+```sh
+$ cd dir/with/website
+# generate the HTML
+$ hugo -d generated
+$ cd generated
+# send it to our S3 bucket
+$ aws s3 sync . s3://www.runatlantis.io/ # change this to your bucket
+```
+
+Now we need the S3 URL to see our content:
+
+```sh
+$ terraform state show aws_s3_bucket.www | grep website_endpoint
+website_endpoint = www.runatlantis.io.s3-website-us-east-1.amazonaws.com
+```
+
+You should see your site hosted at that URL!
+
+## SSL Certificate
+
+Let's use the AWS Certificate Manager to create our SSL certificate:
+
+<<< @/public/blog/hosting-our-static-site/code/ssl-cert.tf
+
+Before you run `terraform apply`, ensure you're forwarding any of
+
+- `administrator@your_domain_name`
+- `hostmaster@your_domain_name`
+- `postmaster@your_domain_name`
+- `webmaster@your_domain_name`
+- `admin@your_domain_name`
+
+to an email address you can access. Then, run `terraform apply` and you should get an email from AWS to confirm you own this domain, where you'll need to click on the link.
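+
+For reference, an email-validated ACM certificate (which is what the email-forwarding step above implies) looks roughly like this in Terraform. This is a sketch with an assumed resource name, not the post's actual `ssl-cert.tf`:
+
+```tf
+# Sketch of an email-validated ACM certificate (assumed resource name).
+# Note: certificates used by CloudFront must be created in us-east-1.
+resource "aws_acm_certificate" "certificate" {
+  domain_name       = "www.runatlantis.io"
+  validation_method = "EMAIL"
+}
+```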
+ +## CloudFront + +Now we're ready for CloudFront to host our website using the S3 bucket for the content and using our SSL certificate. Warning! There's a lot of code ahead but most of it is just defaults. + +<<< @/public/blog/hosting-our-static-site/code/cloudfront.tf + +Apply the changes with `terraform apply` and then find the domain name that CloudFront gives us: + +```sh +$ terraform state show aws_cloudfront_distribution.www_distribution | grep ^domain_name +domain_name = d1l8j8yicxhafq.cloudfront.net +``` + +You'll probably get an error if you go to that URL right away. You need to wait a couple minutes for CloudFront to set itself up. It took me 10 minutes. You can view its progress in the console: <https://console.aws.amazon.com/cloudfront/home> + +## DNS + +We're almost done! We've got CloudFront hosting our site, now we need to point our DNS at it. + +<<< @/public/blog/hosting-our-static-site/code/dns.tf + +If you bought your domain from somewhere else like Namecheap, you'll need to point your DNS at the nameservers listed in the state for the Route53 zone you created. First `terraform apply` (which may take a while), then find out your nameservers. + +```sh +$ terraform state show aws_route53_zone.zone +id = Z2FNAJGFW912JG +comment = Managed by Terraform +force_destroy = false +name = runatlantis.io +name_servers.# = 4 +name_servers.0 = ns-1349.awsdns-40.org +name_servers.1 = ns-1604.awsdns-08.co.uk +name_servers.2 = ns-412.awsdns-51.com +name_servers.3 = ns-938.awsdns-53.net +tags.% = 0 +zone_id = Z2FNAJGFW912JG +``` + +Then look at your domain's docs for how to change your nameservers to all 4 listed. + +## That's it...? + +Once the DNS propagates you should see your site at `https://www.yourdomain`! But what about `https://yourdomain`? i.e. without the `www.`? Shouldn't this redirect to `https://www.yourdomain`? + +## Root Domain + +It turns out, we need to create a whole new S3 bucket, CloudFront distribution and Route53 record just to get this to happen. That's because although S3 can serve up a redirect to the www version of your site, it can't host SSL certs and so you need CloudFront. I've included all the terraform necessary for that below. + +Congrats! You're done! + +<iframe src="https://cdn.embedly.com/widgets/media.html?src=https%3A%2F%2Fgiphy.com%2Fembed%2Fl0MYt5jPR6QX5pnqM%2Ftwitter%2Fiframe&display_name=Giphy&url=https%3A%2F%2Fmedia.giphy.com%2Fmedia%2Fl0MYt5jPR6QX5pnqM%2Fgiphy.gif&image=https%3A%2F%2Fi.giphy.com%2Fmedia%2Fl0MYt5jPR6QX5pnqM%2Fgiphy.gif&key=d04bfffea46d4aeda930ec88cc64b87c&type=text%2Fhtml&schema=giphy" allowfullscreen="" frameborder="0" height="244" width="435" title="The Office Party Hard GIF - Find & Share on GIPHY" class="fr n gh dv bg" scrolling="no"></iframe> + +If you're using Terraform in a team, check out Atlantis: <https://github.com/runatlantis/atlantis> for automation and collaboration to make your team happier! + +Here's the Terraform needed to redirect your root domain: + +<<< @/public/blog/hosting-our-static-site/code/full.tf diff --git a/runatlantis.io/blog/2018/joining-hashicorp.md b/runatlantis.io/blog/2018/joining-hashicorp.md new file mode 100644 index 0000000000..69ab04cda8 --- /dev/null +++ b/runatlantis.io/blog/2018/joining-hashicorp.md @@ -0,0 +1,50 @@ +--- +title: I'm Joining HashiCorp! 
+lang: en-US
+---
+
+# I'm Joining HashiCorp
+
+::: info
+This post was originally written on October 23rd, 2018
+
+Original post: <https://medium.com/runatlantis/joining-hashicorp-200ee9572dc5>
+:::
+
+Dear Atlantis Community,
+
+My name is Luke and I'm the maintainer of [Atlantis](https://www.runatlantis.io/), an open source tool for Terraform collaboration. Today I'm excited to announce that I'm joining HashiCorp!
+
+![](/blog/joining-hashicorp/pic1.webp)
+
+## What Does This Mean For Atlantis?
+
+In the near term, nothing will change for Atlantis and its users. As a HashiCorp employee I will continue to maintain Atlantis, review pull requests, triage issues, and write code.
+
+In the long term, HashiCorp and I want to address collaboration workflows for all users of Terraform. We are still working out the details of how Atlantis will fit into the longer term plan, but whatever direction we take, we're committed to keeping Atlantis free and open source.
+
+## HashiCorp and Atlantis
+
+Why does HashiCorp want to support Atlantis?
+
+Today HashiCorp [announced their commitment to provide collaboration solutions to the whole Terraform community](https://www.hashicorp.com/blog/terraform-collaboration-for-everyone). They see the Atlantis project as one manifestation of this vision and understand its importance to many in the Terraform community. They believe that by working together, we can create a solution that will scale from a single user to hundreds of collaborators in a large organization.
+
+## Why am I joining?
+
+Those of you who know me may wonder why I made this decision. It came down to wanting to continue working on Atlantis–and the larger story of Terraform collaboration–and finding a way to support myself.
+
+In January, 9 months ago, I quit my job at Hootsuite to work **full time** on Atlantis (Atlantis was originally created at Hootsuite by my friend [Anubhav Mishra](https://twitter.com/anubhavm)). I left because I knew that the Terraform community was in need of a solution for collaboration and that with full-time development, Atlantis could be that solution.
+
+During the last 9 months, Atlantis matured into a fully fledged collaboration solution and gained many new users. It has been an amazing time, but I've been working for free! I've always known that for Atlantis to be successful in the long term, I would need to find a way to support myself.
+
+A couple of weeks ago, as I was playing around with Atlantis monetization strategies, HashiCorp contacted me. I learned that they shared a vision of building Terraform collaboration solutions for the broader community and that they were interested in combining forces. They also assured me that they wanted to do right by the Atlantis community.
+
+This was a compelling offer versus solo-founding a company around Atlantis: I would be able to focus on coding and product instead of business and sales, and I could spend all of my time on Atlantis and the larger story of Terraform collaboration. As a result, I came to the conclusion that joining HashiCorp was the right decision for me and the community.
+
+## Conclusion
+
+Atlantis has been a passion of mine for almost two years now. I deeply care about the future of the project and its community and I know that this move will ensure that that future is bright.
+ +There are probably some questions I haven't answered in this post so please don't hesitate to reach out, either via [Twitter](https://twitter.com/lkysow) or on the [Atlantis Slack](https://join.slack.com/t/atlantis-community/shared_invite/zt-9xlxtxtc-CUSKB1ATt_sQy6um~LDPNw). + +I'm excited for the future of Atlantis and Terraform collaboration and I hope you are too. diff --git a/runatlantis.io/blog/2018/putting-the-dev-into-devops-why-your-developers-should-write-terraform-too.md b/runatlantis.io/blog/2018/putting-the-dev-into-devops-why-your-developers-should-write-terraform-too.md new file mode 100644 index 0000000000..88b28a1b16 --- /dev/null +++ b/runatlantis.io/blog/2018/putting-the-dev-into-devops-why-your-developers-should-write-terraform-too.md @@ -0,0 +1,244 @@ +--- +title: "Putting The Dev Into DevOps: Why Your Developers Should Write Terraform Too" +lang: en-US +--- + +# Putting The Dev Into DevOps: Why Your Developers Should Write Terraform Too + +::: info +This post was originally written on August 29th, 2018 + +Original post: <https://medium.com/runatlantis/putting-the-dev-into-devops-why-your-developers-should-write-terraform-too-d3c079dfc6a8> +::: + +[Terraform](https://www.terraform.io/) is an amazing tool for provisioning infrastructure. Terraform enables your operators to perform their work faster and more reliably. + +**But if only your ops team is writing Terraform, you're missing out.** + +Terraform is not just a tool that makes ops teams more effective. Adopting Terraform is an opportunity to turn all of your developers into operators (at least for smaller tasks). This can make your entire engineering team more effective and create a better relationship between developers and operators. + +### Quick Aside — What is Terraform? + +Terraform is two things. It's a language for describing infrastructure: + +```tf +resource "aws_instance" "example" { + ami = "ami-2757f631" + instance_type = "t2.micro" +} +``` + +And it's a CLI tool that reads Terraform code and makes API calls to AWS (or any other cloud provider) to provision that infrastructure. + +In this example, we're using the CLI to run `terraform apply` which will create an EC2 instance: + +```sh +$ terraform apply + +Terraform will perform the following actions: + + # aws_instance.example + + aws_instance.example + ami: "ami-2757f631" + instance_type: "t2.micro" + ... + +Plan: 1 to add, 0 to change, 0 to destroy. + +Do you want to perform these actions? + Terraform will perform the actions described above. + Only 'yes' will be accepted to approve. + + Enter a value: yes + +aws_instance.example: Creating... + ami: "" => "ami-2757f631" + instance_type: "" => "t2.micro" + ... + +aws_instance.example: Still creating... (10s elapsed) +aws_instance.example: Creation complete + +Apply complete! Resources: 1 added, 0 changed, 0 destroyed. +``` + +## Terraform Adoption From A Dev's Perspective + +Adopting Terraform is great for your operations team's effectiveness but it doesn't change much for devs. Before Terraform adoption, devs typically interacted with an ops team like this: + +![](/blog/putting-the-dev-into-devops/pic1.webp) + +1. **Dev: Creates ticket asking for some ops work** +2. **Dev: Waits** +3. _Ops: Looks at ticket when in queue_ +4. _Ops: Does work_ +5. _Ops: Updates ticket_ +6. **Dev: Continues their work** + +After the Ops team adopts Terraform, the workflow from a dev's perspective is the same! + +![](/blog/putting-the-dev-into-devops/pic2.webp) + +1. 
**Dev: Creates ticket asking for some ops work** +2. **Dev: Waits** +3. _Ops: Looks at ticket when in queue_ +4. _Ops: Does work. This time using Terraform (TF)_ +5. _Ops: Updates ticket_ +6. **Dev: Continues their work** + +With Terraform, there's less of Step 2 (Dev: Waits) but apart from that, not much has changed. + +> If only ops is writing Terraform, your developers' experience is the same. + +## Devs Want To Help + +Developers would love to help out with operations work. They know that for small changes they should be able to do the work themselves (with a review from ops). For example: + +- Adding a new security group rule +- Increasing the size of an autoscaling group +- Using a larger instance because their app needs more memory + +Developers could make all of these changes because they're small and well defined. Also, previous examples of doing the same thing can guide them. + +## ...But Often They're Not Allowed + +In many organizations, devs are locked out of the cloud console. + +![](/blog/putting-the-dev-into-devops/pic3.webp) + +They might be locked out for good reasons: + +- Security — You can do a lot of damage with full access to a cloud console +- Compliance — Maybe your compliance requires only certain groups to have access +- Cost — Devs might spin up some expensive resources and then forget about them + +Even if they have access, operations can be complicated: + +- It's often difficult to do seemingly simple things (think adding a security group rule that also requires peering VPCs). This means that just having access sometimes isn't enough. Devs might need help from an expert to get things done. + +## Enter Terraform + +With Terraform, everything changes. Or at least it can. + +Now Devs can see in code how infrastructure is built. They can see the exact spot where security group rules are configured: + +```tf +resource "aws_security_group_rule" "allow_all" { + type = "ingress" + from_port = 0 + to_port = 65535 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0"] + security_group_id = "sg-123456" +} + +resource "aws_security_group_rule" "allow_office" { + ... +} +``` + +Or where the size of the autoscaling group is set: + +```tf +resource "aws_autoscaling_group" "asg" { + name = "my-asg" + max_size = 5 + desired_capacity = 4 + min_size = 2 + ... +} +``` + +Devs understand code (surprise!) so it's a lot easier for them to make those small changes. + +Here's the new workflow: + +![](/blog/putting-the-dev-into-devops/pic4.webp) + +1. **Dev: Writes Terraform code** +2. **Dev: Creates pull request** +3. _Ops: Reviews pull request_ +4. **Dev: Applies the change with Terraform (TF)** +5. **Dev: Continues their work** + +Now: + +- Devs are making small changes themselves. This saves time and increases the speed of the whole engineering organization. +- Devs can see exactly what is required to make the change. This means there's less back and forth over a ticket: “Okay so I know you need the security group opened between server A and B, but on which ports and with which protocol?” +- Devs start to see how infrastructure is built. This increases cooperation between dev and ops because they can understand each other's work. + +Great! But there's another problem. + +## Devs Are Locked Out Of Terraform Too + +In order to execute Terraform you need to have cloud credentials! It's really hard to write Terraform without being able to run `terraform init` and `terraform plan`, for the same reason it would be hard to write code if you could never run it locally! 
+
+So are we back at square one?
+
+## Enter Atlantis
+
+[Atlantis](https://www.runatlantis.io/) is an [open source](https://github.com/runatlantis/atlantis) tool for running Terraform from pull requests. With Atlantis, Terraform is run on a separate server (Atlantis is self-hosted) so you don't need to give out credentials to everyone. Access is controlled through pull request approvals.
+
+Here's what the workflow looks like:
+
+### Step 1 — Create a Pull Request
+
+A developer creates a pull request with their change to add a security group rule.
+
+![](/blog/putting-the-dev-into-devops/pic5.webp)
+
+### Step 2 — Atlantis Runs Terraform Plan
+
+Atlantis automatically runs `terraform plan` and comments back on the pull request with the output. Now developers can fix their Terraform errors before asking for a review.
+
+![](/blog/putting-the-dev-into-devops/pic6.webp)
+
+### Step 3 — Fix The Terraform
+
+The developer pushes a new commit that fixes their error and Atlantis comments back with the valid `terraform plan` output. Now the developer can verify that the plan output looks good.
+
+![](/blog/putting-the-dev-into-devops/pic7.webp)
+
+### Step 4 — Get Approval
+
+You'll probably want to run Atlantis with the `--require-approval` flag, which requires pull requests to be approved before running `atlantis apply`.
+
+![](/blog/putting-the-dev-into-devops/pic8.webp)
+
+### Step 4a — Actually Get Approval
+
+An operator can now come along and review the changes and the output of `terraform plan`. This is much faster than doing the change themselves.
+
+![](/blog/putting-the-dev-into-devops/pic9.webp)
+
+### Step 5 — Apply
+
+To apply the changes, the developer or operator comments “atlantis apply”.
+
+![](/blog/putting-the-dev-into-devops/pic10.webp)
+
+## Success
+
+Now we've got a workflow that makes everyone happy:
+
+- Devs can write Terraform and iterate on the pull request until the `terraform plan` looks good
+- Operators can review pull requests and approve the changes before they're applied
+
+Now developers can make small operations changes and learn more about how infrastructure is built. Everyone can work more effectively and with a shared understanding that enhances collaboration.
+
+## Does It Work In Practice?
+
+Atlantis has been used by my previous company, Hootsuite, for over 2 years. It's used daily by 20 operators but it's also used occasionally by over 60 developers!
+Another company uses Atlantis to manage 600+ Terraform repos collaborated on by over 300 developers and operators.
+
+## Next Steps
+
+- If you'd like to learn more about Terraform, check out HashiCorp's [Introduction to Terraform](https://developer.hashicorp.com/terraform/intro)
+- If you'd like to try out Atlantis, go to <https://www.runatlantis.io>
+- If you have any questions, reach out to me on Twitter (@lkysow) or in the comments below.
+
+## Credits
+
+- Thanks to [Seth Vargo](https://medium.com/@sethvargo) for his talk [Version-Controlled Infrastructure with GitHub](https://www.youtube.com/watch?v=2TWqi7dLSro) that inspired a lot of this post.
+- Thanks to Isha for reading drafts of this post.
+- Icons in graphics made by [Freepik](http://freepik.com/) from [Flaticon](https://www.flaticon.com/) and licensed by [CC 3.0](https://creativecommons.org/licenses/by/3.0/)
diff --git a/runatlantis.io/blog/2018/terraform-and-the-dangers-of-applying-locally.md b/runatlantis.io/blog/2018/terraform-and-the-dangers-of-applying-locally.md
new file mode 100644
index 0000000000..081d2f1154
--- /dev/null
+++ b/runatlantis.io/blog/2018/terraform-and-the-dangers-of-applying-locally.md
@@ -0,0 +1,120 @@
+---
+title: Terraform And The Dangers Of Applying Locally
+lang: en-US
+---
+
+# Terraform And The Dangers Of Applying Locally
+
+::: info
+This post was originally written on July 13th, 2018
+
+Original post: <https://medium.com/runatlantis/terraform-and-the-dangers-of-applying-locally-543563782a73>
+:::
+
+If you're using Terraform, then at some point you've likely run a `terraform apply` that reverted someone else's change!
+
+Here's how that tends to happen:
+
+## The Setup
+
+Say we have two developers: Alice and Bob. Alice needs to add a new security group rule. She checks out a new branch, adds her rule and creates a pull request:
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic1.webp)
+
+When she runs `terraform plan` locally she sees what she expects.
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic2.webp)
+
+Meanwhile, Bob is working on an emergency fix. He checks out a new branch and adds a different security group rule called `emergency`:
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic3.webp)
+
+And, because it's an emergency, he **immediately runs apply**:
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic4.webp)
+
+Now back to Alice. She's just gotten approval on her pull request change and so she runs `terraform apply`:
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic5.webp)
+
+Did you catch what happened? Did you notice that the `apply` deleted Bob's rule?
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic6.webp)
+
+In this example, it wasn't too hard to see. However, if the plan is much longer or the change is less obvious, it can be easy to miss.
+
+## Possible Solutions
+
+There are some ways to avoid this:
+
+### Use `terraform plan -out`
+
+If Alice had run `terraform plan -out plan.tfplan`, then when she ran `terraform apply plan.tfplan` she would see:
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic7.webp)
+
+The problem with this solution is that few people run `terraform plan` anymore, much less `terraform plan -out`!
+
+<iframe src="https://cdn.embedly.com/widgets/media.html?type=text%2Fhtml&key=a19fcc184b9711e1b4764040d3dc5c07&schema=twitter&url=https%3A//twitter.com/sethvargo/status/989979940098424832&image=https%3A//i.embed.ly/1/image%3Furl%3Dhttps%253A%252F%252Fpbs.twimg.com%252Fprofile_images%252F808025120296013825%252FfrGuc14s_400x400.jpg%26key%3Da19fcc184b9711e1b4764040d3dc5c07" allowfullscreen="" frameborder="0" height="249" width="680" title="Seth Vargo on Twitter" class="fr n gh dv bg" scrolling="no"></iframe>
+
+It's easier to just run `terraform apply` and humans will take the easier path most of the time.
+
+### Wrap `terraform apply` to ensure you're up to date with `master`
+
+Another possible solution is to write a wrapper script that ensures our branch is up to date with `master` before it runs `apply` (see the sketch below).
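+
+A minimal version of such a wrapper might look like this (a hypothetical sketch, not from the original post):
+
+```sh
+#!/bin/sh
+# Hypothetical wrapper: refuse to apply unless the current branch
+# already contains the latest master.
+set -e
+git fetch origin master
+if ! git merge-base --is-ancestor origin/master HEAD; then
+  echo "Branch is behind origin/master; rebase first." >&2
+  exit 1
+fi
+terraform apply "$@"
+```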
+But this doesn't solve the problem of Bob running `apply` locally and not yet merging to `master`. In this case, Alice's branch would have been up to date with `master` but not with the latest apply'd state.
+
+### Be more disciplined
+
+What if everyone:
+
+- ALWAYS created a branch, got a pull request review, merged to `master` and then ran apply. And also everyone
+- ALWAYS checked to ensure their branch was rebased from `master`. And also everyone
+- ALWAYS carefully inspected the `terraform plan` output and made sure it was exactly what they expected
+
+...then we wouldn't have a problem!
+
+Unfortunately this is not a real solution. We're all human and we're all going to make mistakes. Relying on people to follow a complicated process 100% of the time is not a solution because it doesn't work.
+
+## Core Problem
+
+The core problem is that everyone is applying from their own workstations and it's up to them to ensure that they're up to date and that they keep `master` up to date. This is like developers deploying to production from their laptops.
+
+### What if, instead of applying locally, a remote system did the apply's?
+
+This is why we built [Atlantis](https://www.runatlantis.io/) – an open source project for Terraform automation by pull request. You could also accomplish this with your own CI system or with [Terraform Enterprise](https://www.hashicorp.com/products/terraform). Here's how Atlantis solves this issue:
+
+When Alice makes her change, she creates a pull request and Atlantis automatically runs `terraform plan` and comments on the pull request.
+
+When Bob makes his change, he creates a pull request and Atlantis automatically runs `terraform plan` and comments on the pull request.
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic8.webp)
+
+Atlantis also **locks the directory** to ensure that no one else can run `plan` or `apply` until Alice's plan has been intentionally deleted or she merges the pull request.
+
+If Bob creates a pull request for his emergency change, he'd see this error:
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic9.webp)
+
+Alice can then comment `atlantis apply` and Atlantis will run the apply itself:
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic10.webp)
+
+Finally, she merges the pull request and unlocks Bob's branch:
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic11.webp)
+
+### But what if Bob ran `apply` locally?
+
+In that case, Alice is still okay because when Atlantis ran `terraform plan` it used `-out`. If Alice tries to apply that plan, Terraform will give an error because the plan was generated against an old state.
+
+### Why does Atlantis run `apply` on the branch and not after a merge to `master`?
+
+We do this because `terraform apply` fails quite often, despite `terraform plan` succeeding. Usually it's because of a dependency issue between resources or because the cloud provider requires a certain format or a certain field to be set. Regardless, in practice we've found that `apply` fails a lot.
+
+By locking the directory, we're essentially ensuring that the branch being `apply`'d is `"master"` since no one else can modify that state. We then get the benefit of being able to iterate on the pull request and push small fixes until we're sure that the changeset is `apply`'d. If `apply` failed after merging to `master`, we'd have to open new pull requests over and over again. There is definitely a tradeoff here; however, we believe it's the right tradeoff.
+
+## Conclusion
+
+In conclusion, running `terraform apply` when you're working with a team of operators can be dangerous.
Look to solutions like your own CI, Atlantis or Terraform Enterprise to ensure you're always working off the latest code that was `apply`'d. + +If you'd like to try Atlantis, you can get started here: <https://www.runatlantis.io/guide/> diff --git a/runatlantis.io/blog/2019/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage.md b/runatlantis.io/blog/2019/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage.md new file mode 100644 index 0000000000..07bfd8ccf9 --- /dev/null +++ b/runatlantis.io/blog/2019/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage.md @@ -0,0 +1,157 @@ +--- +title: 4 Reasons To Try HashiCorp's (New) Free Terraform Remote State Storage +lang: en-US +--- + +# 4 Reasons To Try HashiCorp's (New) Free Terraform Remote State Storage + +::: info +This post was originally written on April 2nd, 2019 + +Original post: <https://medium.com/runatlantis/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage-b03f01bfd251> +::: + +Update (May 20/19) — Free State Storage is now called Terraform Cloud and is out of Beta, meaning anyone can sign up! + +HashiCorp is planning to offer free Terraform Remote State Storage and they have a beta version available now. In this article, I talk about 4 reasons you should try it (Disclosure: I work at HashiCorp). + +> _Sign up for Terraform Cloud [here](https://goo.gl/X5t5EM)._ + +## What is Terraform State? + +Before I get into why you should use the new remote state storage, let's talk about what exactly we mean by state in Terraform. + +Terraform uses _state_ to map your Terraform code to the real-world resources that it provisions. For example, if I have Terraform code to create an AWS EC2 instance: + +```tf +resource "aws_instance" "web" { + ami = "ami-e6d9d68c" + instance_type = "t2.micro" +} +``` + +When I run `terraform apply`, Terraform will make a “create EC2 instance” API call to AWS and AWS will return the unique ID of that instance (ex. `i-0ad17607e5ee026d0`). Terraform needs to record that ID somewhere so that later, it can make API calls to change or delete the instance. + +To store this information, Terraform uses a state file. For the above code, the state file will look something like: + +```json{4,7} +{ + ... + "resources": { + "aws_instance.web": { + "type": "aws_instance", + "primary": { + "id": "i-0ad17607e5ee026d0", + ... +} +``` + +Here you can see that the resource `aws_instance.web` from our Terraform code is mapped to the instance ID `i-0ad17607e5ee026d0`. + +So if Terraform state is just a file, then what is remote state? + +## Remote State + +By default, Terraform writes its state file to your local filesystem. This is okay for personal projects, but once you start working with a team, things get messy. In a team, you need to make sure everyone has an up to date version of the state file **and** ensure that two people aren't making concurrent changes. + +Enter remote state! Remote state is just storing the state file remotely, rather than on your filesystem. With remote state, there's only one copy so Terraform can ensure you're always up to date. To prevent team members from modifying state at the same time, Terraform can lock the remote state. + +> Remote state is just storing the state file remotely, rather than on your filesystem. + +Alright, so remote state is great, but unfortunately setting it up can be a bit tricky. 
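+
+For a concrete sense of what that involves, a self-managed S3 backend looks something like this (a sketch; the bucket and table names are hypothetical):
+
+```tf
+terraform {
+  backend "s3" {
+    bucket         = "my-terraform-state"     # a bucket you must create and secure yourself
+    key            = "prod/terraform.tfstate"
+    region         = "us-east-1"
+    dynamodb_table = "terraform-locks"        # a DynamoDB table you must create for locking
+  }
+}
+```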
In AWS, you can store it in an S3 bucket, but you need to create the bucket, configure it properly, set up its permissions properly, create a DynamoDB table for locking and then ensure everyone has proper credentials to write to it. It's much the same story in the other clouds. + +As a result, setting up remote state can be an annoying stumbling block as teams adopt Terraform. + +This brings us to the first reason to try HashiCorp's Free Remote State Storage... + +## Reason #1 — Easy To Set Up + +Unlike other remote state solutions that require complicated setup to get right, setting up free remote state storage is easy. + +> Setting up HashiCorp's free remote state storage is easy + +Step 1 — Sign up for your [free Terraform Cloud](https://app.terraform.io/signup) account + +Step 2 — When you log in, you'll land on this page where you'll create your organization: + +![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic1.webp) + +Step 3 — Next, go into User Settings and generate a token: + +![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic2.webp) + +Step 4 — Take this token and create a local ~/.terraformrc file: + +```tf +credentials "app.terraform.io" { + token = "mhVn15hHLylFvQ.atlasv1.jAH..." +} +``` + +Step 5 — That's it! Now you're ready to store your state. + +In your Terraform project, add a `terraform` block: + +```tf{3,5} +terraform { + backend "remote" { + organization = "my-org" # org name from step 2. + workspaces { + name = "my-app" # name for your app's state. + } + } +} +``` + +Run `terraform init` and tada! Your state is now being stored in Terraform Enterprise. You can see the state in the UI: + +![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic3.webp) + +Speaking of seeing state in a UI... + +## Reason #2 — Fully Featured State Viewer + +The second reason to try Terraform Cloud is its fully featured state viewer: + +![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic4.webp) + +If you've ever messed up your Terraform state and needed to download an old version or wanted an audit log to know who changed what, then you'll love this feature. + +You can view the full state file at each point in time: + +![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic5.webp) + +You can also see the diff of what changed: + +![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic6.webp) + +Of course, you can find a way to get this information from some of the other state backends, but it's difficult. With HashiCorp's remote state storage, you get it for free. + +## Reason #3 — Manual Locking + +The third reason to try Terraform Cloud is the ability to manually lock your state. + +Ever been working on a piece of infrastructure and wanted to ensure that no one could make any changes to it at the same time? 
+
+Terraform Cloud comes with the ability to lock and unlock states from the UI:
+
+![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic7.webp)
+
+While the state is locked, `terraform` operations will receive an error:
+
+![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic8.webp)
+
+This saves you a lot of these:
+
+![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic9.webp)
+
+## Reason #4 — Works With Atlantis
+
+The final reason to try out Terraform Cloud is that it works flawlessly with [Atlantis](https://www.runatlantis.io/)!
+
+Set an `ATLANTIS_TFE_TOKEN` environment variable to a TFE token and you're ready to go. Head over to <https://www.runatlantis.io/docs/terraform-cloud.html> to learn more.
+
+## Conclusion
+
+I highly encourage you to try out the new free Remote State Storage backend. It's a compelling offering over other state backends thanks to its ease of setup, fully featured state viewer and locking capabilities.
+
+If you're not on the waitlist, sign up here: <https://app.terraform.io/signup>.
diff --git a/runatlantis.io/blog/2024/april-2024-survey-results.md b/runatlantis.io/blog/2024/april-2024-survey-results.md
new file mode 100644
index 0000000000..d4c92fa9df
--- /dev/null
+++ b/runatlantis.io/blog/2024/april-2024-survey-results.md
@@ -0,0 +1,56 @@
+---
+title: Atlantis User Survey Results
+lang: en-US
+---
+
+# Atlantis User Survey Results
+
+In April 2024, the Core Atlantis Team launched an anonymous survey of our users. Over the two months the survey was open, we received 354 responses, which we will use to better understand our community's needs and help prioritize our roadmap.
+
+Overall, the results below show that we have a diverse set of enthusiastic users, and that though many still run the classic Atlantis setup (a handful of repos running Terraform against AWS in GitHub), there are many different use cases and directions that the community is pursuing and would like to see Atlantis support.
+
+We are grateful for everyone who took the time to share their experiences with Atlantis. We plan to run this kind of survey on a semi-regular basis, so stay tuned!
+
+## Anonymized Results
+
+### How do you interact with Atlantis?
+
+![](/blog/april-2024-survey-results/interact.webp)
+
+Unsurprisingly, most users of Atlantis wear multiple hats and are involved throughout the development process.
+
+### How do you/your organization deploy Atlantis?
+
+![](/blog/april-2024-survey-results/deploy.webp)
+
+Most users deploy Atlantis using Kubernetes and/or AWS. "Other Docker" respondents use Docker but not EKS or Helm directly, while a minority use some other combination of technologies.
+
+### What Infrastructure as Code (IaC) tool(s) do you use with Atlantis?
+
+![](/blog/april-2024-survey-results/iac.webp)
+
+The vast majority of Atlantis users are still using Terraform as some part of their deployment. About half of them also use Terragrunt, and OpenTofu seems to be gaining some ground.
+
+### How many repositories does your Atlantis manage?
+
+![](/blog/april-2024-survey-results/repos.webp)
+
+Most users have relatively modest footprints to manage with Atlantis (though a few large monorepos could be obscured in the numbers).
+
+### Which Version Control Systems (VCSs) do you use?
+
+![](/blog/april-2024-survey-results/vcs.webp)
+
+Most users of Atlantis are using GitHub, with a sizeable chunk on GitLab, followed by Bitbucket and others. This is consistent with the support and feature requests that the maintainers see for the various VCSs in the codebase.
+
+### What is the most important feature you find missing from Atlantis?
+
+![](/blog/april-2024-survey-results/features.webp)
+
+Since this was a free-form question, there was a long tail of responses; the chart above only shows normalized answers that appeared three or more times.
+
+Drift Detection as well as infrastructure improvements were the obvious winners here. After that, users focused on various integrations and improvements to the UI.
+
+## Conclusion
+
+It is always interesting and exciting for the core team to see the breadth of the use of Atlantis, and we look forward to using this information to understand the needs of the community. Atlantis has always been a community-led effort, and we hope to continue to carry that spirit forward!
diff --git a/runatlantis.io/blog/2024/integrating-atlantis-with-opentofu.md b/runatlantis.io/blog/2024/integrating-atlantis-with-opentofu.md
new file mode 100644
index 0000000000..574017fb71
--- /dev/null
+++ b/runatlantis.io/blog/2024/integrating-atlantis-with-opentofu.md
@@ -0,0 +1,200 @@
+---
+title: Integrating Atlantis with OpenTofu
+lang: en-US
+---
+
+# Integrating Atlantis with OpenTofu
+
+::: info
+This post was originally written on May 27th, 2024
+
+Original post: <https://dev.to/jmateusousa/integrating-atlantis-with-opentofu-lnd>
+:::
+
+## What was our motivation?
+
+Due to the Terraform license change, many companies are migrating their IaC processes to OpenTofu. With this in mind, and knowing that many of them use Atlantis and Terraform for infrastructure delivery automation, I created this documentation showing what to do to integrate Atlantis with OpenTofu.
+
+Stack: Atlantis, Terragrunt, OpenTofu, GitHub, ALB, EKS.
+
+We will implement it with the [Helm chart](https://www.runatlantis.io/docs/deployment.html#kubernetes-helm-chart):
+
+**1** - Add the runatlantis repository:
+
+```sh
+helm repo add runatlantis https://runatlantis.github.io/helm-charts
+```
+
+**2** - Create the file `values.yaml` by running:
+
+```sh
+helm inspect values runatlantis/atlantis > values.yaml
+```
+
+**3** - Edit `values.yaml` and add your access credentials and the secret that will be used in the Atlantis webhook configuration.
+See how to create a [GitHub App](https://docs.github.com/pt/apps/creating-github-apps/about-creating-github-apps).
+
+```yaml
+githubApp:
+  id: "CHANGE ME"
+  key: |
+    -----BEGIN RSA PRIVATE KEY-----
+    "CHANGE ME"
+    -----END RSA PRIVATE KEY-----
+  slug: atlantis
+  # Atlantis webhook secret
+  secret: "CHANGE ME"
+```
+
+**4** - In `orgAllowlist`, enter the GitHub org and repositories that Atlantis will interact with:
+
+```yaml
+# All repositories in the org
+orgAllowlist: github.com/MY-ORG/*
+
+# or just one repository
+orgAllowlist: github.com/MY-ORG/MY-REPO-IAC
+
+# or all repositories that start with MY-REPO-IAC-
+orgAllowlist: github.com/MY-ORG/MY-REPO-IAC-*
+```
+
+**5** - Now let's configure the script that the Atlantis init container runs on startup. In this step we download and install Terragrunt and OpenTofu, and place their binaries in the shared dir `/plugins`.
+
+```yaml
+initConfig:
+  enabled: true
+  image: alpine:latest
+  imagePullPolicy: IfNotPresent
+  # sharedDir is set as env var INIT_SHARED_DIR
+  sharedDir: /plugins
+  workDir: /tmp
+  sizeLimit: 250Mi
+  # example of how the script can be configured to install tools/providers required by the atlantis pod
+  script: |
+    #!/bin/sh
+    set -euxo pipefail
+
+    # terragrunt
+    TG_VERSION="0.55.10"
+    TG_SHA256_SUM="1ad609399352348a41bb5ea96fdff5c7a18ac223742f60603a557a54fc8c6cff"
+    TG_FILE="${INIT_SHARED_DIR}/terragrunt"
+    wget https://github.com/gruntwork-io/terragrunt/releases/download/v${TG_VERSION}/terragrunt_linux_amd64 -O "${TG_FILE}"
+    echo "${TG_SHA256_SUM}  ${TG_FILE}" | sha256sum -c
+    chmod 755 "${TG_FILE}"
+    # run the downloaded binary by path; the shared dir is not on the PATH here
+    "${TG_FILE}" -v
+
+    # OpenTofu
+    TF_VERSION="1.6.2"
+    TF_FILE="${INIT_SHARED_DIR}/tofu"
+    wget https://github.com/opentofu/opentofu/releases/download/v${TF_VERSION}/tofu_${TF_VERSION}_linux_amd64.zip
+    unzip tofu_${TF_VERSION}_linux_amd64.zip
+    mv tofu ${INIT_SHARED_DIR}
+    chmod 755 "${TF_FILE}"
+    "${TF_FILE}" -v
+```
+
+**6** - Here we configure the envs to avoid downloading alternative versions of Terraform and to tell Terragrunt where to find the OpenTofu binary:
+
+```yaml
+# envs
+environment:
+  ATLANTIS_TF_DOWNLOAD: false
+  TERRAGRUNT_TFPATH: /plugins/tofu
+```
+
+**7** - Last but not least, here we specify the Atlantis-side configuration we will have for the repositories:
+
+```yaml
+# repository config
+repoConfig: |
+  ---
+  repos:
+  - id: /.*/
+    apply_requirements: [approved, mergeable]
+    allow_custom_workflows: true
+    allowed_overrides: [workflow, apply_requirements, delete_source_branch_on_merge]
+```
+
+**8** - Configure the Atlantis webhook ingress; in the example below we are using an AWS ALB:
+
+```yaml
+# ingress config
+ingress:
+  annotations:
+    alb.ingress.kubernetes.io/backend-protocol: HTTP
+    alb.ingress.kubernetes.io/certificate-arn: arn:aws:acm:certificate
+    alb.ingress.kubernetes.io/group.name: external-atlantis
+    alb.ingress.kubernetes.io/healthcheck-path: /healthz
+    alb.ingress.kubernetes.io/healthcheck-port: "80"
+    alb.ingress.kubernetes.io/healthcheck-protocol: HTTP
+    alb.ingress.kubernetes.io/listen-ports: '[{"HTTPS":443}]'
+    alb.ingress.kubernetes.io/scheme: internet-facing
+    alb.ingress.kubernetes.io/ssl-redirect: "443"
+    alb.ingress.kubernetes.io/success-codes: "200"
+    alb.ingress.kubernetes.io/target-type: ip
+  apiVersion: networking.k8s.io/v1
+  enabled: true
+  host: atlantis.your.domain
+  ingressClassName: aws-ingress-class-name
+  path: /*
+  pathType: ImplementationSpecific
+```
+
+Save all changes made to `values.yaml`.
+
+**9** - Using Atlantis custom workflows, we can create an `atlantis.yaml` file in the root folder of your repository. The example below should cover most scenarios; adapt as needed.
+
+```yaml
+version: 3
+automerge: true
+parallel_plan: true
+parallel_apply: false
+projects:
+- name: terragrunt
+  dir: .
+  workspace: terragrunt
+  delete_source_branch_on_merge: true
+  autoplan:
+    enabled: false
+  apply_requirements: [mergeable, approved]
+  workflow: terragrunt
+workflows:
+  terragrunt:
+    plan:
+      steps:
+      - env:
+          name: TF_IN_AUTOMATION
+          value: 'true'
+      - run: find . -name '.terragrunt-cache' | xargs rm -rf
+      - run: terragrunt init -reconfigure
+      - run:
+          command: terragrunt plan -input=false -out=$PLANFILE
+          output: strip_refreshing
+    apply:
+      steps:
+      - run: terragrunt apply $PLANFILE
+```
+
+**10** - Now let's get to the installation itself. Search for the available versions of Atlantis:
+
+```sh
+helm search repo runatlantis
+```
+
+Replace `CHART-VERSION` with the version you want to install and run the command below:
+
+```sh
+helm upgrade -i atlantis runatlantis/atlantis --version CHART-VERSION -f values.yaml --namespace atlantis --create-namespace
+```
+
+Now, see how to configure the Atlantis [webhook](../../docs/configuring-webhooks.md) on your GitHub repository.
+
+See how Atlantis [works](../../docs/using-atlantis.md).
+
+Find out more at:
+
+- <https://www.runatlantis.io/guide.html>.
+- <https://opentofu.org/docs/>.
+- <https://github.com/runatlantis/atlantis/issues/3741>.
+
+Share it with your friends =)
diff --git a/runatlantis.io/contributing.md b/runatlantis.io/contributing.md
new file mode 100644
index 0000000000..3d8e24de16
--- /dev/null
+++ b/runatlantis.io/contributing.md
@@ -0,0 +1,17 @@
+---
+aside: false
+---
+# Atlantis Contributing Documentation
+
+These docs are for users who want to contribute to the Atlantis project. This
+can range from writing documentation to helping the community on Slack, discussing
+issues, or writing code.
+
+:::tip Looking to get started or use Atlantis?
+If you're new, check out the [Guide](./guide.md) or the
+[Documentation](./docs.md).
+:::
+
+## Next Steps
+
+- [Events Controller](./contributing/events-controller.md)  –  How do the events work?
diff --git a/runatlantis.io/contributing/events-controller.md b/runatlantis.io/contributing/events-controller.md
new file mode 100644
index 0000000000..9827aa5cdb
--- /dev/null
+++ b/runatlantis.io/contributing/events-controller.md
@@ -0,0 +1,108 @@
+# Events Controller
+
+Webhooks are the primary interaction between the Version Control System (VCS)
+and Atlantis. Each VCS sends the requests to the `/events` endpoint. The
+implementation of this endpoint can be found in the
+[events_controller.go](https://github.com/runatlantis/atlantis/blob/main/server/controllers/events/events_controller.go)
+file. This file contains the `Post` function, `func (e *VCSEventsController)
+Post(w http.ResponseWriter, r *http.Request)`, that parses the request
+according to the configured VCS.
+
+Atlantis currently handles the following events:
+
+- Comment Event
+- Pull Request Event
+
+All the other events are ignored.
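+
+Schematically, the dispatch looks something like this (a simplified, self-contained sketch for illustration; it is not the actual Atlantis source, though the real controller similarly inspects VCS-specific webhook headers):
+
+```go
+package main
+
+import "net/http"
+
+// VCSEventsController is a stand-in for the real controller type.
+type VCSEventsController struct{}
+
+// Post sketches how /events could dispatch on VCS-specific headers.
+func (e *VCSEventsController) Post(w http.ResponseWriter, r *http.Request) {
+	switch {
+	case r.Header.Get("X-Github-Event") != "":
+		// parse the GitHub payload, then route to the comment or
+		// pull request handler
+	case r.Header.Get("X-Gitlab-Event") != "":
+		// parse the GitLab payload
+	default:
+		http.Error(w, "unsupported event", http.StatusBadRequest)
+	}
+}
+
+func main() {
+	http.Handle("/events", http.HandlerFunc((&VCSEventsController{}).Post))
+	_ = http.ListenAndServe(":4141", nil) // Atlantis's default port is 4141
+}
+```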
+
+```mermaid
+---
+title: events controller flowchart
+---
+flowchart LR
+    events(/events - Endpoint) --> Comment_Event(Comment - Event)
+    events --> Pull_Request_Event(Pull Request - Event)
+
+    Comment_Event --> pre_workflow(pre-workflow - Hook)
+    pre_workflow --> plan(plan - command)
+    pre_workflow --> apply(apply - command)
+    pre_workflow --> approve_policies(approve policies - command)
+    pre_workflow --> unlock(unlock - command)
+    pre_workflow --> version(version - command)
+    pre_workflow --> import(import - command)
+    pre_workflow --> state(state - command)
+
+    plan --> post_workflow(post-workflow - Hook)
+    apply --> post_workflow
+    approve_policies --> post_workflow
+    unlock --> post_workflow
+    version --> post_workflow
+    import --> post_workflow
+    state --> post_workflow
+
+    Pull_Request_Event --> Open_Update_PR(Open / Update Pull Request)
+    Pull_Request_Event --> Close_PR(Close Pull Request)
+
+    Open_Update_PR --> pre_workflow(pre-workflow - Hook)
+    Close_PR --> plan(plan - command)
+
+    pre_workflow --> plan
+    plan --> post_workflow(post-workflow - Hook)
+
+    Close_PR --> CleanUpPull(CleanUpPull)
+    CleanUpPull --> post_workflow(post-workflow - Hook)
+```
+
+## Comment Event
+
+This event is triggered whenever a user enters a comment on the Pull Request,
+Merge Request, or whatever it's called for the respective VCS. After parsing the
+VCS-specific request, the code calls the `handleCommentEvent` function, which
+then passes the processing to the `handleCommentEvent` function in the
+[command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/command_runner.go)
+file. This function first calls the pre-workflow hooks, then executes one of the
+commands listed below, and finally the post-workflow hooks.
+
+- [plan_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/plan_command_runner.go)
+- [apply_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/apply_command_runner.go)
+- [approve_policies_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/approve_policies_command_runner.go)
+- [unlock_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/unlock_command_runner.go)
+- [version_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/version_command_runner.go)
+- [import_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/import_command_runner.go)
+- [state_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/state_command_runner.go)
+
+## Pull Request Event
+
+Before comment events can be handled, the Pull Request itself must be created.
+Atlantis also allows running commands for certain Pull Request events.
+
+<details>
+  <summary>Pull Request Webhooks</summary>
+
+The list below links to the supported VCSs and their Pull Request Webhook
+documentation.
+
+- [Azure DevOps Pull Request Created](https://learn.microsoft.com/en-us/azure/devops/service-hooks/events?view=azure-devops#pull-request-created)
+- [BitBucket Pull Request](https://support.atlassian.com/bitbucket-cloud/docs/event-payloads/#Pull-request-events)
+- [GitHub Pull Request](https://docs.github.com/en/webhooks/webhook-events-and-payloads#pull_request)
+- [GitLab Merge Request](https://docs.gitlab.com/ee/user/project/integrations/webhook_events.html#merge-request-events)
+- [Gitea Webhooks](https://docs.gitea.com/next/usage/webhooks)
+
+</details>
+
+The following list shows the supported events:
+
+- Opened Pull Request
+- Updated Pull Request
+- Closed Pull Request
+- Other Pull Request event
+
+The `RunAutoPlanCommand` function in the
+[command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/command_runner.go)
+file is called for the _Open_ and _Update_ Pull Request events. When enabled on
+the project, this automatically runs the `plan` for the specific repository.
+
+Whenever a Pull Request is closed, the `CleanUpPull` function in the
+[instrumented_pull_closed_executor.go](https://github.com/runatlantis/atlantis/blob/main/server/events/instrumented_pull_closed_executor.go)
+file is called. This function cleans up all files, locks, and other
+information related to the closed Pull Request.
diff --git a/runatlantis.io/contributing/glossary.md b/runatlantis.io/contributing/glossary.md
new file mode 100644
index 0000000000..99c1e73287
--- /dev/null
+++ b/runatlantis.io/contributing/glossary.md
@@ -0,0 +1,26 @@
+# Glossary
+
+The Atlantis community uses many words and phrases to work more efficiently.
+You will find the most common ones and their meanings on this page.
+
+## Pull / Merge Request Event
+
+The different VCSs have different names for merging changes. Atlantis uses the
+name Pull Request as the abstraction. The VCS provider implements this
+abstraction and forwards the call to the respective function.
+
+## VCS
+
+VCS stands for Version Control System.
+
+Atlantis supports only git as a Version Control System. However, there is
+support for multiple VCS Providers. Currently, it supports the following
+providers:
+
+- [Azure DevOps](https://azure.microsoft.com/en-us/products/devops)
+- [BitBucket](https://bitbucket.org/)
+- [GitHub](https://github.com/)
+- [GitLab](https://gitlab.com/)
+- [Gitea](https://gitea.com/)
+
+The term VCS is used for both git and the different VCS providers.
diff --git a/runatlantis.io/docs.md b/runatlantis.io/docs.md
new file mode 100644
index 0000000000..23b27f1c32
--- /dev/null
+++ b/runatlantis.io/docs.md
@@ -0,0 +1,18 @@
+---
+aside: false
+---
+# Atlantis Documentation
+
+These docs are for users who are ready to get Atlantis installed and start using it.
+
+:::tip Looking to get started?
+If you're new here, check out the [Guide](./guide.md)
+where you can try our [Test Drive](./guide/test-drive.md) or [Run Atlantis Locally](./guide/testing-locally.md).
+:::
+
+## Next Steps
+
+* [Installing Atlantis](./docs/installation-guide.md) – Get Atlantis up and running
+* [Configuring Atlantis](./docs/configuring-atlantis.md) – Configure how Atlantis works for your specific use-cases
+* [Using Atlantis](./docs/using-atlantis.md) – How do you use Atlantis?
+* [How Atlantis Works](./docs/how-atlantis-works.md) – Internals of what Atlantis is doing
diff --git a/runatlantis.io/docs/README.md b/runatlantis.io/docs/README.md
deleted file mode 100644
index 5527692cf5..0000000000
--- a/runatlantis.io/docs/README.md
+++ /dev/null
@@ -1,14 +0,0 @@
-# Atlantis Documentation
-
-These docs are for users that are ready to get Atlantis installed and start using it.
-
-:::tip Looking to get started?
-If you're new here, check out the [Guide](/guide/)
-where you can try our [Test Drive](/guide/test-drive.html) or [Run Atlantis Locally](/guide/testing-locally.html).
-:::
-
-### Next Steps
-* [Installing Atlantis](/docs/installation-guide.html) – Get Atlantis up and running
-* [Configuring Atlantis](configuring-atlantis.html) – Configure how Atlantis works for your specific use-cases
-* [Using Atlantis](using-atlantis.html) – How do you use Atlantis?
-* [How Atlantis Works](how-atlantis-works.html) – Internals of what Atlantis is doing
diff --git a/runatlantis.io/docs/access-credentials.md b/runatlantis.io/docs/access-credentials.md
index 9cd514fb70..d7b76573ce 100644
--- a/runatlantis.io/docs/access-credentials.md
+++ b/runatlantis.io/docs/access-credentials.md
@@ -1,10 +1,11 @@
 # Git Host Access Credentials
-This page describes how to create credentials for your Git host (GitHub, GitLab, Bitbucket, or Azure DevOps)
+
+This page describes how to create credentials for your Git host (GitHub, GitLab, Gitea, Bitbucket, or Azure DevOps)
 that Atlantis will use to make API calls.
-[[toc]]

 ## Create an Atlantis user (optional)
+
 We recommend creating a new user named **@atlantis** (or something close) or using a dedicated CI user.
 This isn't required (you can use an existing user or github app credentials), however all the comments that Atlantis writes
@@ -14,19 +15,23 @@ will come from that user so it might be confusing if its coming from a personal
 <p align="center"><i>An example comment coming from the @atlantisbot user</i></p>

 ## Generating an Access Token
+
 Once you've created a new user (or decided to use an existing one), you need to generate an access token. Read on for the
 instructions for your specific Git host:
+
 * [GitHub](#github-user)
 * [GitHub app](#github-app)
 * [GitLab](#gitlab)
+* [Gitea](#gitea)
 * [Bitbucket Cloud (bitbucket.org)](#bitbucket-cloud-bitbucket-org)
 * [Bitbucket Server (aka Stash)](#bitbucket-server-aka-stash)
 * [Azure DevOps](#azure-devops)

 ### GitHub user
-- Create a [Personal Access Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#creating-a-fine-grained-personal-access-token)
-- Create the token with **repo** scope
-- Record the access token
+
+* Create a [Personal Access Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#creating-a-fine-grained-personal-access-token)
+* Create the token with **repo** scope
+* Record the access token

 ::: warning
 Your Atlantis user must also have "Write permissions" (for repos in an organization) or be a "Collaborator" (for repos in a user account) to be able to set commit statuses:
 ![Atlantis status](./images/status.png)
@@ -35,18 +40,18 @@ Your Atlantis user must also have "Write permissions" (for repos in an organizat
 ### GitHub app

 #### Create the GitHub App Using Atlantis
+
 ::: warning
 Available in Atlantis versions **newer** than 0.13.0.
::: +* Start Atlantis with fake github username and token (`atlantis server --gh-user fake --gh-token fake --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`). If installing as an **Organization**, remember to add `--gh-org your-github-org` to this command. +* Visit `https://$ATLANTIS_HOST/github-app/setup` and click on **Setup** to create the app on GitHub. You'll be redirected back to Atlantis +* A link to install your app, along with its secrets, will be shown on the screen. Record your app's credentials and install your app for your user/org by following said link. +* Create a file with the contents of the GitHub App Key, e.g. `atlantis-app-key.pem` +* Restart Atlantis with new flags: `atlantis server --gh-app-id <your id> --gh-app-key-file atlantis-app-key.pem --gh-webhook-secret <your secret> --write-git-creds --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`. -- Start Atlantis with fake github username and token (`atlantis server --gh-user fake --gh-token fake --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`). If installing as an **Organization**, remember to add `--gh-org your-github-org` to this command. -- Visit `https://$ATLANTIS_HOST/github-app/setup` and click on **Setup** to create the app on GitHub. You'll be redirected back to Atlantis -- A link to install your app, along with its secrets, will be shown on the screen. Record your app's credentials and install your app for your user/org by following said link. -- Create a file with the contents of the GitHub App Key, e.g. `atlantis-app-key.pem` -- Restart Atlantis with new flags: `atlantis server --gh-app-id <your id> --gh-app-key-file atlantis-app-key.pem --gh-webhook-secret <your secret> --write-git-creds --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`. - - NOTE: Instead of using a file for the GitHub App Key you can also pass the key value directly using `--gh-app-key`. You can also create a config file instead of using flags. See [Server Configuration](/docs/server-configuration.html#config-file). + NOTE: Instead of using a file for the GitHub App Key you can also pass the key value directly using `--gh-app-key`. You can also create a config file instead of using flags. See [Server Configuration](server-configuration.md#config-file). ::: warning Only a single installation per GitHub App is supported at the moment. @@ -58,13 +63,13 @@ GitHub App handles the webhook calls by itself, hence there is no need to create #### Manually Creating the GitHub app -- Create the GitHub app as an Administrator - - Ensure the app is registered / installed with the organization / user - - See the GitHub app [documentation](https://docs.github.com/en/apps/creating-github-apps/about-creating-github-apps/about-creating-github-apps) -- Create a file with the contents of the GitHub App Key, e.g. `atlantis-app-key.pem` -- Start Atlantis with the following flags: `atlantis server --gh-app-id <your id> --gh-installation-id <installation id> --gh-app-key-file atlantis-app-key.pem --gh-webhook-secret <your secret> --write-git-creds --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`. 
+* Create the GitHub app as an Administrator
+  * Ensure the app is registered / installed with the organization / user
+  * See the GitHub app [documentation](https://docs.github.com/en/apps/creating-github-apps/about-creating-github-apps/about-creating-github-apps)
+* Create a file with the contents of the GitHub App Key, e.g. `atlantis-app-key.pem`
+* Start Atlantis with the following flags: `atlantis server --gh-app-id <your id> --gh-installation-id <installation id> --gh-app-key-file atlantis-app-key.pem --gh-webhook-secret <your secret> --write-git-creds --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`.

-  NOTE: Instead of using a file for the GitHub App Key you can also pass the key value directly using `--gh-app-key`. You can also create a config file instead of using flags. See [Server Configuration](/docs/server-configuration.html#config-file).
+  NOTE: Instead of using a file for the GitHub App Key you can also pass the key value directly using `--gh-app-key`. You can also create a config file instead of using flags. See [Server Configuration](server-configuration.md#config-file).

::: tip NOTE
Manually installing the GitHub app means that the credentials can be shared by many Atlantis installations. This has the benefit of centralizing repository access for shared modules / code.
@@ -90,49 +95,67 @@ GitHub App needs these permissions. These are automatically set when a GitHub ap
Since v0.19.7, a new permission for `Administration` has been added. If you have already created a GitHub app, updating Atlantis to v0.19.7 will not automatically add this permission, so you will need to set it manually.
Since v0.22.3, a new permission for `Members` has been added, which is required for features that apply permissions to an organization's team members rather than individual users. Like the `Administration` permission above, updating Atlantis will not automatically add this permission, so if you wish to use features that rely on checking team membership you will need to add this manually.
+
+A new permission for `Actions` has been added, which is required for checking whether a pull request is mergeable while bypassing the apply check. Updating Atlantis will not automatically add this permission, so you will need to add this manually.
:::

-| Type | Access |
-| --------------- | ------------------- |
-| Administration | Read-only |
-| Checks | Read and write |
-| Commit statuses | Read and write |
-| Contents | Read and write |
-| Issues | Read and write |
-| Metadata | Read-only (default) |
-| Pull requests | Read and write |
-| Webhooks | Read and write |
-| Members | Read-only |
+| Type            | Access              |
+| --------------- | ------------------- |
+| Administration  | Read-only           |
+| Checks          | Read and write      |
+| Commit statuses | Read and write      |
+| Contents        | Read and write      |
+| Issues          | Read and write      |
+| Metadata        | Read-only (default) |
+| Pull requests   | Read and write      |
+| Webhooks        | Read and write      |
+| Members         | Read-only           |
+| Actions         | Read-only           |

### GitLab
-- Follow: [https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token)
-- Create a token with **api** scope
-- Record the access token
+
+* Follow: [GitLab: Create a personal access token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token)
+* Create a token with **api** scope
+* Record the access token
+
+### Gitea
+
+* Go to "Profile and Settings" > "Settings" in Gitea (top-right)
+* Go to "Applications" under "User Settings" in Gitea
+* Create a token under the "Manage Access Tokens" section with the following permissions:
+  * issue: Read and Write
+  * repository: Read and Write
+  * user: Read
+* Record the access token

### Bitbucket Cloud (bitbucket.org)
-- Create an App Password by following [https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/](https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/)
-- Label the password "atlantis"
-- Select **Pull requests**: **Read** and **Write** so that Atlantis can read your pull requests and write comments to them
-- Record the access token
+
+* Create an App Password by following [BitBucket Cloud: Create an app password](https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/)
+* Label the password "atlantis"
+* Select **Pull requests**: **Read** and **Write** so that Atlantis can read your pull requests and write comments to them
+* Record the access token

### Bitbucket Server (aka Stash)
-- Click on your avatar in the top right and select **Manage account**
-- Click **Personal access tokens** in the sidebar
-- Click **Create a token**
-- Name the token **atlantis**
-- Give the token **Read** Project permissions and **Write** Pull request permissions
-- Click **Create** and record the access token
+
+* Click on your avatar in the top right and select **Manage account**
+* Click **Personal access tokens** in the sidebar
+* Click **Create a token**
+* Name the token **atlantis**
+* Give the token **Read** Project permissions and **Write** Pull request permissions
+* Click **Create** and record the access token

NOTE: Atlantis will send the token as a [Bearer Auth to the Bitbucket API](https://confluence.atlassian.com/bitbucketserver/http-access-tokens-939515499.html#HTTPaccesstokens-UsingHTTPaccesstokens) instead of using Basic Auth.
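+
+You can sanity-check the token before handing it to Atlantis. A minimal
+sketch, assuming your Bitbucket Server is reachable at
+`bitbucket.example.com` (a placeholder) and the token is in `$TOKEN`:
+
+```sh
+# A 200 response with a JSON project list confirms the token works when
+# sent as a Bearer header, which is how Atlantis will send it.
+curl -H "Authorization: Bearer $TOKEN" \
+  https://bitbucket.example.com/rest/api/1.0/projects
+```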
### Azure DevOps -- Create a Personal access token by following [https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops](https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops) -- Label the password "atlantis" -- The minimum scopes required for this token are: - - Code (Read & Write) - - Code (Status) - - Member Entitlement Management (Read) -- Record the access token + +* Create a Personal access token by following [Azure DevOps: Use personal access tokens to authenticate](https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops) +* Label the password "atlantis" +* The minimum scopes required for this token are: + * Code (Read & Write) + * Code (Status) + * Member Entitlement Management (Read) +* Record the access token ## Next Steps -Once you've got your user and access token, you're ready to create a webhook secret. See [Creating a Webhook Secret](webhook-secrets.html). + +Once you've got your user and access token, you're ready to create a webhook secret. See [Creating a Webhook Secret](webhook-secrets.md). diff --git a/runatlantis.io/docs/api-endpoints.md b/runatlantis.io/docs/api-endpoints.md index 96dd6d0b51..ce622979da 100644 --- a/runatlantis.io/docs/api-endpoints.md +++ b/runatlantis.io/docs/api-endpoints.md @@ -9,7 +9,7 @@ To enable the API endpoints, `api-secret` should be configured. :::tip Prerequisites -* Set `api-secret` as part of the [Server Configuration](server-configuration.html#api-secret) +* Set `api-secret` as part of the [Server Configuration](server-configuration.md#api-secret) * Pass `X-Atlantis-Token` with the same secret in the request header ::: @@ -17,22 +17,22 @@ To enable the API endpoints, `api-secret` should be configured. #### Description -Execute [atlantis plan](using-atlantis.html#atlantis-plan) on the specified repository. +Execute [atlantis plan](using-atlantis.md#atlantis-plan) on the specified repository. #### Parameters -| Name | Type | Required | Description | -|------------|-------------------------------------|----------|------------------------------------------| -| Repository | string | Yes | Name of the Terraform repository | -| Ref | string | Yes | Git reference, like a branch name | -| Type | string | Yes | Type of the VCS provider (Github/Gitlab) | -| Paths | [ [Path](api-endpoints.html#path) ] | Yes | Paths to the projects to run the plan | -| PR | int | No | Pull Request number | +| Name | Type | Required | Description | +|------------|---------|----------|------------------------------------------| +| Repository | string | Yes | Name of the Terraform repository | +| Ref | string | Yes | Git reference, like a branch name | +| Type | string | Yes | Type of the VCS provider (Github/Gitlab) | +| Paths | Path | Yes | Paths to the projects to run the plan | +| PR | int | No | Pull Request number | -##### Path +#### Path -Similar to the [Options](using-atlantis.html#options) of `atlantis plan`. Path specifies which directory/workspace -within the repository to run the plan. +Similar to the [Options](using-atlantis.md#options) of `atlantis plan`. Path specifies which directory/workspace +within the repository to run the plan. At least one of `Directory` or `Workspace` should be specified. 
| Name | Type | Required | Description |
@@ -92,22 +92,22 @@ curl --request POST 'https://<ATLANTIS_HOST_NAME>/api/plan' \

#### Description

-Execute [atlantis apply](using-atlantis.html#atlantis-apply) on the specified repository.
+Execute [atlantis apply](using-atlantis.md#atlantis-apply) on the specified repository.

#### Parameters

-| Name | Type | Required | Description |
-|------------|---------------------------------------|----------|------------------------------------------|
-| Repository | string | Yes | Name of the Terraform repository |
-| Ref | string | Yes | Git reference, like a branch name |
-| Type | string | Yes | Type of the VCS provider (Github/Gitlab) |
-| Paths | [ [Path](api-endpoints.html#path-1) ] | Yes | Paths to the projects to run the apply |
-| PR | int | No | Pull Request number |
+| Name       | Type   | Required | Description                              |
+|------------|--------|----------|------------------------------------------|
+| Repository | string | Yes      | Name of the Terraform repository         |
+| Ref        | string | Yes      | Git reference, like a branch name        |
+| Type       | string | Yes      | Type of the VCS provider (Github/Gitlab) |
+| Paths      | Path   | Yes      | Paths to the projects to run the apply   |
+| PR         | int    | No       | Pull Request number                      |

-##### Path
+#### Path

-Similar to the [Options](using-atlantis.html#options-1) of `atlantis apply`. Path specifies which directory/workspace
-within the repository to run the apply.
+Similar to the [Options](using-atlantis.md#options-1) of `atlantis apply`. Path specifies which directory/workspace
+within the repository to run the apply. At least one of `Directory` or `Workspace` should be specified.

| Name | Type | Required | Description |
diff --git a/runatlantis.io/docs/apply-requirements.md b/runatlantis.io/docs/apply-requirements.md
index 870ac4972e..166931851d 100644
--- a/runatlantis.io/docs/apply-requirements.md
+++ b/runatlantis.io/docs/apply-requirements.md
@@ -1,5 +1,5 @@
 # Apply Requirements

 :::warning REDIRECT
-This page is moved to [Command Requirements](/docs/command-requirements.html).
+This page has moved to [Command Requirements](command-requirements.md).
 :::
diff --git a/runatlantis.io/docs/automerging.md b/runatlantis.io/docs/automerging.md
index 1e0b21ba77..5c2f96d34e 100644
--- a/runatlantis.io/docs/automerging.md
+++ b/runatlantis.io/docs/automerging.md
@@ -1,44 +1,70 @@
 # Automerging
+
 Atlantis can be configured to automatically merge a pull request after all plans have been
 successfully applied.
-
 ![Automerge](./images/automerge.png)

 ## How To Enable
+
 Automerging can be enabled either by:
+
 1. Passing the `--automerge` flag to `atlantis server`. This sets the parameter globally; however, explicit declaration in the repo config will be respected and take priority.
 1. Setting `automerge: true` in the repo's `atlantis.yaml` file:
+
 ```yaml
 version: 3
 automerge: true
 projects:
 - dir: .
 ```
+
 :::tip NOTE
 If a repo has an `atlantis.yaml` file, then each project in the repo needs to be configured under the `projects` key.
 :::

 ## How to Disable
+
 If automerge is enabled, you can disable it for a single `atlantis apply`
 command with the `--auto-merge-disabled` option.
+
+## How to set the merge method for automerge
+
+If automerge is enabled, you can use the `--auto-merge-method` option
+for the `atlantis apply` command to specify which merge method to use.
+
+```shell
+atlantis apply --auto-merge-method <method>
+```
+
+The `method` must be one of:
+
+- merge
+- rebase
+- squash
+
+This is currently only implemented for the GitHub VCS.
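+
+For example, to have Atlantis squash-merge the pull request after a
+successful apply (assuming automerge is on and GitHub is the VCS), comment:
+
+```shell
+atlantis apply --auto-merge-method squash
+```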
+
 ## Requirements

 ### All Plans Must Succeed
+
 When automerge is enabled, **all plans** in a pull request **must succeed** before
 **any** plans can be applied.

 For example, imagine this scenario:
+
 1. I open a pull request that makes changes to two Terraform projects, in
    `dir1/` and `dir2/`.
 1. The plan for `dir2/` fails because my Terraform syntax is wrong.

 In this scenario, I can't run
-```
+
+```shell
 atlantis apply -d dir1
 ```
+
 Even though that plan succeeded, because **all** plans must succeed for **any** plans
 to be saved.
@@ -47,8 +73,9 @@ autoplan. Then I will be able to apply both plans.

 ### All Plans must be applied

-If multiple projects/dirs/workspaces are configured to be planned automatically,
+If multiple projects/dirs/workspaces are configured to be planned automatically,
 then they should all be applied before Atlantis automatically merges the PR.

 ## Permissions
+
 The Atlantis VCS user must have the ability to merge pull requests.
diff --git a/runatlantis.io/docs/autoplanning.md b/runatlantis.io/docs/autoplanning.md
index 2183219703..b4657d801a 100644
--- a/runatlantis.io/docs/autoplanning.md
+++ b/runatlantis.io/docs/autoplanning.md
@@ -1,8 +1,10 @@
 # Autoplanning
+
 On any **new** pull request or **new commit** to an existing pull request, Atlantis will attempt to
 run `terraform plan` in the directories it thinks hold modified Terraform projects.

 The algorithm it uses is as follows:
+
 1. Get list of all modified files in pull request
 1. Filter to those containing `.tf`
 1. Get the directories that those files are in
@@ -11,8 +13,10 @@ The algorithm it uses is as follows:
    contains a `main.tf` run plan in that directory, otherwise ignore the change (see below for exceptions).

 ## Example
+
 Given the directory structure:
-
+
+```plain
 .
 ├── modules
 │   └── module1
@@ -26,21 +30,25 @@ Given the directory structure:

 * If `project1/main.tf` were modified, we would run `plan` in `project1`
 * If `modules/module1/main.tf` were modified, we would not automatically run `plan` because we couldn't determine the location of the terraform project
-  * You could use an [atlantis.yaml](repo-level-atlantis-yaml.html#configuring-planning) file to specify which projects to plan when this module changed
-  * You could enable [module autoplanning](server-configuration.html#autoplan-modules) which indexes projects to their local module dependencies.
-  * Or you could manually plan with `atlantis plan -d <dir>`
+  * You could use an [atlantis.yaml](repo-level-atlantis-yaml.md#configuring-planning) file to specify which projects to plan when this module changed
+  * You could enable [module autoplanning](server-configuration.md#autoplan-modules) which indexes projects to their local module dependencies.
+  * Or you could manually plan with `atlantis plan -d <dir>`
 * If `project1/modules/module1/main.tf` were modified, we would look one level above `project1/modules`
 into `project1/`, see that there was a `main.tf` file and so run plan in `project1/`

 ## Bitbucket-Specific Notes
+
 Bitbucket does not have a webhook that triggers only upon a new PR or commit. To fix this we cache the last commit to see if it has changed. If the cache is emptied, Atlantis will think your commit is new and you may see extra plans.
 This scenario can happen if:
+
 * Atlantis restarts
 * You are running multiple Atlantis instances behind a load balancer

 ## Customizing
+
 If you would like to customize how Atlantis determines which directory to run in
 or disable it altogether, you need to create an `atlantis.yaml` file.
See

-* [Disabling Autoplanning](repo-level-atlantis-yaml.html#disabling-autoplanning)
-* [Configuring Planning](repo-level-atlantis-yaml.html#configuring-planning)
+
+* [Disabling Autoplanning](repo-level-atlantis-yaml.md#disabling-autoplanning)
+* [Configuring Planning](repo-level-atlantis-yaml.md#configuring-planning)
diff --git a/runatlantis.io/docs/checkout-strategy.md b/runatlantis.io/docs/checkout-strategy.md
index 066f7444f0..5c38586a4c 100644
--- a/runatlantis.io/docs/checkout-strategy.md
+++ b/runatlantis.io/docs/checkout-strategy.md
@@ -7,6 +7,7 @@ variable that get passed to the `atlantis server` command.
Atlantis supports `branch` and `merge` strategies.

## Branch
+
If set to `branch` (the default), Atlantis will check out the source branch of the pull
request.
@@ -17,6 +18,7 @@ If the pull request was asking to merge `branch` into `main`, Atlantis would
check out `branch` at commit `C3`.

## Merge
+
The problem with the `branch` strategy is that if users push branches that are
out of date with `main`, then their `terraform plan` could be deleting some
resources that were configured in the main branch.
@@ -49,9 +51,9 @@ commit is pushed to `main` **after** Atlantis runs `plan`, nothing will happen.
To optimize cloning time, Atlantis can perform a shallow clone by specifying the `--checkout-depth` flag. The cloning is performed in the following manner:

-- Shallow clone of the default branch is performed with depth of `--checkout-depth` value of zero (full clone).
-- `branch` is retrieved, including the same amount of commits.
-- Merge base of the default branch and `branch` is checked for existence in the shallow clone.
-- If the merge base is not present, it means that either of the branches are ahead of the merge base by more than `--checkout-depth` commits. In this case full repo history is fetched.
+* Shallow clone of the default branch is performed with depth of `--checkout-depth` value of zero (full clone).
+* `branch` is retrieved, including the same amount of commits.
+* Merge base of the default branch and `branch` is checked for existence in the shallow clone.
+* If the merge base is not present, it means that either of the branches are ahead of the merge base by more than `--checkout-depth` commits. In this case full repo history is fetched.

-If the commit history often diverges by more than the default checkout depth then the `--checkout-depth` flag should be tuned to avoid full fetches.
\ No newline at end of file
+If the commit history often diverges by more than the default checkout depth then the `--checkout-depth` flag should be tuned to avoid full fetches.
diff --git a/runatlantis.io/docs/command-requirements.md b/runatlantis.io/docs/command-requirements.md
index e3aea4ea21..bbf12ede00 100644
--- a/runatlantis.io/docs/command-requirements.md
+++ b/runatlantis.io/docs/command-requirements.md
@@ -1,7 +1,7 @@
 # Command Requirements
-[[toc]]

 ## Intro
+
 Atlantis requires certain conditions be satisfied **before** `atlantis apply` and `atlantis import`
 commands can be run:

@@ -10,31 +10,41 @@ commands can be run:
 * [UnDiverged](#undiverged) - requires pull requests to be ahead of the base branch

 ## What Happens If The Requirement Is Not Met?
+
 If the requirement is not met, users will see an error if they try to run `atlantis apply`:
 ![Mergeable Apply Requirement](./images/apply-requirement.png)

 ## Supported Requirements
+
 ### Approved
+
 The `approved` requirement will prevent applies unless the pull request is approved
 by at least one person other than the author.
#### Usage
+
Set the `approved` requirement by:
+
1. Creating a `repos.yaml` file with the `apply_requirements` key:
+
   ```yaml
   repos:
   - id: /.*/
     apply_requirements: [approved]
   ```
+
1. Or by allowing an `atlantis.yaml` file to specify the `apply_requirements` key in the `repos.yaml` config:
-   #### repos.yaml
+
+   **repos.yaml**
+
   ```yaml
   repos:
   - id: /.*/
     allowed_overrides: [apply_requirements]
   ```

-   #### atlantis.yaml
+   **atlantis.yaml**
+
   ```yaml
   version: 3
   projects:
@@ -43,7 +53,9 @@ The `approved` requirement by:
 ```

 #### Meaning
+
 Each VCS provider has different rules around who can approve:
+
 * **GitHub** – **Any user with read permissions** to the repo can approve a pull request
 * **GitLab** – The user who can approve can be set in the [repo settings](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/)
 * **Bitbucket Cloud (bitbucket.org)** – A user can approve their own pull request but
@@ -57,11 +69,15 @@ To require **certain people** to approve the pull request, look at the
 :::

 ### Mergeable
+
 The `mergeable` requirement will prevent applies unless a pull request is able to be merged.

 #### Usage
+
 Set the `mergeable` requirement by:
+
 1. Creating a `repos.yaml` file with the `apply_requirements` key:
+
    ```yaml
    repos:
    - id: /.*/
@@ -69,14 +85,17 @@ Set the `mergeable` requirement by:
    ```

 1. Or by allowing an `atlantis.yaml` file to specify `plan_requirements`, `apply_requirements` and `import_requirements` keys in the `repos.yaml` config:
-   #### repos.yaml
+
+   **repos.yaml**
+
    ```yaml
    repos:
    - id: /.*/
      allowed_overrides: [plan_requirements, apply_requirements, import_requirements]
    ```

-   #### atlantis.yaml
+   **atlantis.yaml**
+
    ```yaml
    version: 3
    projects:
@@ -87,25 +106,28 @@ Set the `mergeable` requirement by:
    ```

 #### Meaning
+
 Each VCS provider has a different concept of "mergeability":

 ::: warning
 Some VCS providers have a feature for branch protection to control "mergeability". To use it,
 limit the base branch so as not to bypass the branch protection.
-See also the `branch` keyword in [Server Side Repo Config](server-side-repo-config.html#reference) for more details.
+See also the `branch` keyword in [Server Side Repo Config](server-side-repo-config.md#reference) for more details.
 :::

 #### GitHub
+
 In GitHub, if you're not using [Protected Branches](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches) then all pull requests are mergeable unless there is a conflict.

 If you set up Protected Branches then you can enforce:
+
 * Requiring certain status checks to be passing
 * Requiring certain people to have reviewed and approved the pull request
 * Requiring `CODEOWNERS` to have reviewed and approved the pull request
-* Requiring that the branch is up to date with `main`
+* Requiring that the branch is up-to-date with `main`

-See [https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches)
+See [GitHub: About protected branches](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches)
 for more details.
::: warning
@@ -119,6 +141,7 @@ If you set `atlantis/apply` to the mergeable requirement, use the `--gh-allow-me
:::

#### GitLab
+
For GitLab, a merge request will be merged if there are no conflicts, no unresolved discussions if it is a project requirement, and if all necessary approvers have approved the pull request.

For pipelines, if the project requires that pipelines must succeed, all builds except the apply command status will be checked.

Jobs with the `allow_failure` setting set to `true` will be ignored. If the pipeline has been skipped and the project allows merging, it will be marked as mergeable.

#### Bitbucket.org (Bitbucket Cloud) and Bitbucket Server (Stash)
+
For Bitbucket, we just check if there is a conflict that is preventing a
merge. We don't check anything else because Bitbucket's API doesn't support it.

If you need a specific check, please
[open an issue](https://github.com/runatlantis/atlantis/issues/new).

#### Azure DevOps
+
In Azure DevOps, all pull requests are mergeable unless there is a conflict. You can set a pull request to "Complete" right away, or set "Auto-Complete", which will merge after all branch policies are met. See [Review code with pull requests](https://docs.microsoft.com/en-us/azure/devops/repos/git/pull-requests?view=azure-devops).

[Branch policies](https://docs.microsoft.com/en-us/azure/devops/repos/git/branch-policies?view=azure-devops) can:
+
* Require a minimum number of reviewers
* Allow users to approve their own changes
* Allow completion even if some reviewers vote "Waiting" or "Reject"
@@ -147,12 +173,16 @@ At this time, the Azure DevOps client only supports merging using the default 'n
:::

### UnDiverged
+
Prevent applies if there are any changes on the base branch since the most recent plan.
Applies to the `merge` checkout strategy only, which you need to set via the `--checkout-strategy` flag.

#### Usage
+
You can set the `undiverged` requirement by:
+
1. Creating a `repos.yaml` file with `plan_requirements`, `apply_requirements` and `import_requirements` keys:
+
   ```yaml
   repos:
   - id: /.*/
@@ -160,15 +190,19 @@ You can set the `undiverged` requirement by:
     apply_requirements: [undiverged]
     import_requirements: [undiverged]
   ```
+
1. Or by allowing an `atlantis.yaml` file to specify the `plan_requirements`, `apply_requirements` and `import_requirements` keys in your `repos.yaml` config:
-   #### repos.yaml
+
+   **repos.yaml**
+
   ```yaml
   repos:
   - id: /.*/
     allowed_overrides: [plan_requirements, apply_requirements, import_requirements]
   ```

-   #### atlantis.yaml
+   **atlantis.yaml**
+
   ```yaml
   version: 3
   projects:
@@ -177,7 +211,9 @@ You can set the `undiverged` requirement by:
     apply_requirements: [undiverged]
     import_requirements: [undiverged]
   ```
+
#### Meaning
+
The `merge` checkout strategy creates a temporary merge commit and runs the `plan` on the Atlantis
local version of the PR source and destination branch. The local destination branch can become out of
date since changes to the destination branch are not fetched if there are no changes to the source
branch. `undiverged` enforces that Atlantis' local version of main is up to date
@@ -185,16 +221,21 @@ with remote so that the state of the source during the `apply` is identical to that at `plan` time.

## Setting Command Requirements
+
As mentioned above, you can set command requirements via flags, in `repos.yaml`, or in `atlantis.yaml` if
`repos.yaml` allows the override.
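+
+For example, a server-side `repos.yaml` carrying these requirement settings is
+loaded at startup via the `--repo-config` flag (a minimal sketch; combine it
+with your usual credential and allowlist flags):
+
+```shell
+atlantis server --repo-config repos.yaml
+```
+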
### Flags Override
+
Flags **override** any `repos.yaml` or `atlantis.yaml` settings so they are equivalent to always
having that apply requirement set.

### Project-Specific Settings
+
If you only want some projects/repos to have apply requirements, then you must
+
1. Specify which repos have which requirements via the `repos.yaml` file.
+
   ```yaml
   repos:
   - id: /.*/
@@ -221,7 +262,9 @@ If you only want some projects/repos to have apply requirements, then you must
   config. For example, if I have two directories, `staging` and
   `production`, I might use:
-   #### repos.yaml
+
+   **repos.yaml:**
+
   ```yaml
   repos:
   - id: /.*/
     # Allow any repo to specify apply_requirements in atlantis.yaml
   ```

-   #### atlantis.yaml
+   **atlantis.yaml:**
+
   ```yaml
   version: 3
   projects:
@@ -248,14 +292,17 @@ If you only want some projects/repos to have apply requirements, then you must
   ```

### Multiple Requirements
+
You can set any or all of `approved`, `mergeable`, and `undiverged` requirements.

## Who Can Apply?
+
Once the apply requirement is satisfied, **anyone** who can comment on the pull request
can run the actual `atlantis apply` command.

## Next Steps

-* For more information on GitHub pull request reviews and approvals see: [https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/about-pull-request-reviews](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/about-pull-request-reviews)
-* For more information on GitLab merge request reviews and approvals (only supported on GitLab Enterprise) see: [https://docs.gitlab.com/ee/user/project/merge_requests/approvals/](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/).
+
+* For more information on GitHub pull request reviews and approvals see: [GitHub: About pull request reviews](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/about-pull-request-reviews)
+* For more information on GitLab merge request reviews and approvals (only supported on GitLab Enterprise) see: [GitLab: Merge request approvals](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/).
+* For more information on Bitbucket pull request reviews and approvals see: [BitBucket: Use pull requests for code review](https://confluence.atlassian.com/bitbucket/pull-requests-and-code-review-223220593.html)
+* For more information on Azure DevOps pull request reviews and approvals see: [Azure DevOps: Create pull requests](https://docs.microsoft.com/en-us/azure/devops/repos/git/pull-requests?view=azure-devops&tabs=browser)
diff --git a/runatlantis.io/docs/configuring-atlantis.md b/runatlantis.io/docs/configuring-atlantis.md
index 46edbbbc3c..c40e55560c 100644
--- a/runatlantis.io/docs/configuring-atlantis.md
+++ b/runatlantis.io/docs/configuring-atlantis.md
@@ -1,25 +1,29 @@
 # Configuring Atlantis

 There are three methods for configuring Atlantis:
+
 1. Passing flags to the `atlantis server` command
 1. Creating a server-side repo config file and using the `--repo-config` flag
 1. Placing an `atlantis.yaml` file at the root of your Terraform repositories

 ## Flags
+
 Flags to `atlantis server` are used to configure the global operation of Atlantis, for example
 setting credentials for your Git Host or configuring SSL certs.

-See [Server Configuration](server-configuration.html) for more details.
+See [Server Configuration](server-configuration.md) for more details.

 ## Server-Side Repo Config
+
 A Server-Side Repo Config file is used to control per-repo behaviour and what users can do in repo-level
 `atlantis.yaml` files.

-See [Server-Side Repo Config](server-side-repo-config.html) for more details.
+See [Server-Side Repo Config](server-side-repo-config.md) for more details.

 ## Repo-Level `atlantis.yaml` Files
+
 `atlantis.yaml` files placed at the root of your Terraform repos can be used to change
 the default Atlantis behaviour for each repo.

-See [Repo-Level atlantis.yaml Files](repo-level-atlantis-yaml.html) for more details.
+See [Repo-Level atlantis.yaml Files](repo-level-atlantis-yaml.md) for more details.
diff --git a/runatlantis.io/docs/configuring-webhooks.md b/runatlantis.io/docs/configuring-webhooks.md
index be285ef6bc..295b50f437 100644
--- a/runatlantis.io/docs/configuring-webhooks.md
+++ b/runatlantis.io/docs/configuring-webhooks.md
@@ -1,16 +1,18 @@
 # Configuring Webhooks
+
 Atlantis needs to receive Webhooks from your Git host so that it can respond to pull request events.

 :::tip Prerequisites
+
 * You have created an [access credential](access-credentials.md)
 * You have created a [webhook secret](webhook-secrets.md)
 * You have [deployed](deployment.md) Atlantis and have a URL for it
 :::

 See the instructions for your specific provider below.
-[[toc]]

 ## GitHub/GitHub Enterprise
+
 You can install your webhook at the [organization](https://docs.github.com/en/get-started/learning-about-github/types-of-github-accounts) level, or for each individual repository.

 ::: tip NOTE
@@ -22,101 +24,130 @@ When authenticating as a GitHub App, Webhooks are automatically created and need
 If you're installing on the organization, navigate to your organization's page and click **Settings**.
 If installing on a single repository, navigate to the repository home page and click **Settings**.
-- set **Content type** to `application/json` -- set **Secret** to the Webhook Secret you generated previously - - **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret. -- select **Let me select individual events** -- check the boxes - - **Pull request reviews** - - **Pushes** - - **Issue comments** - - **Pull requests** -- leave **Active** checked -- click **Add webhook** -- See [Next Steps](#next-steps) + +* Select **Webhooks** or **Hooks** in the sidebar +* Click **Add webhook** +* set **Payload URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** +* double-check you added `/events` to the end of your URL. +* set **Content type** to `application/json` +* set **Secret** to the Webhook Secret you generated previously + * **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret. +* select **Let me select individual events** +* check the boxes + * **Pull request reviews** + * **Pushes** + * **Issue comments** + * **Pull requests** +* leave **Active** checked +* click **Add webhook** +* See [Next Steps](#next-steps) ## GitLab + If you're using GitLab, navigate to your project's home page in GitLab -- Click **Settings > Webhooks** in the sidebar -- set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** -- double-check you added `/events` to the end of your URL. -- set **Secret Token** to the Webhook Secret you generated previously - - **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret. -- check the boxes - - **Push events** - - **Comments** - - **Merge Request events** -- leave **Enable SSL verification** checked -- click **Add webhook** -- See [Next Steps](#next-steps) + +* Click **Settings > Webhooks** in the sidebar +* set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** +* double-check you added `/events` to the end of your URL. +* set **Secret Token** to the Webhook Secret you generated previously + * **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret. +* check the boxes + * **Push events** + * **Comments** + * **Merge Request events** +* leave **Enable SSL verification** checked +* click **Add webhook** +* See [Next Steps](#next-steps) + +## Gitea + +If you're using Gitea, navigate to your project's home page in Gitea + +* Click **Settings > Webhooks** in the top- and then sidebar +* Click **Add webhook > Gitea** (Gitea webhooks are service specific, but this works) +* set **Target URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** +* double-check you added `/events` to the end of your URL. +* set **Secret** to the Webhook Secret you generated previously + * **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret. 
+* Select **Custom Events...** +* Check the boxes + * **Repository events > Push** + * **Issue events > Issue Comment** + * **Pull Request events > Pull Request** + * **Pull Request events > Pull Request Comment** + * **Pull Request events > Pull Request Reviewed** + * **Pull Request events > Pull Request Synchronized** +* Leave **Active** checked +* Click **Add Webhook** +* See [Next Steps](#next-steps) ## Bitbucket Cloud (bitbucket.org) -- Go to your repo's home page -- Click **Settings** in the sidebar -- Click **Webhooks** under the **WORKFLOW** section -- Click **Add webhook** -- Enter "Atlantis" for **Title** -- set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** -- double-check you added `/events` to the end of your URL. -- Keep **Status** as Active -- Don't check **Skip certificate validation** because NGROK has a valid cert. -- Select **Choose from a full list of triggers** -- Under **Repository** **un**check everything -- Under **Issues** leave everything **un**checked -- Under **Pull Request**, select: Created, Updated, Merged, Declined and Comment created -- Click **Save** + +* Go to your repo's home page +* Click **Settings** in the sidebar +* Click **Webhooks** under the **WORKFLOW** section +* Click **Add webhook** +* Enter "Atlantis" for **Title** +* set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** +* double-check you added `/events` to the end of your URL. +* Keep **Status** as Active +* Don't check **Skip certificate validation** because NGROK has a valid cert. +* Select **Choose from a full list of triggers** +* Under **Repository** **un**check everything +* Under **Issues** leave everything **un**checked +* Under **Pull Request**, select: Created, Updated, Merged, Declined and Comment created +* Click **Save** <img src="../guide/images/bitbucket-webhook.png" alt="Bitbucket Webhook" style="max-height: 500px"> -- See [Next Steps](#next-steps) +* See [Next Steps](#next-steps) ## Bitbucket Server (aka Stash) -- Go to your repo's home page -- Click **Settings** in the sidebar -- Click **Webhooks** under the **WORKFLOW** section -- Click **Create webhook** -- Enter "Atlantis" for **Name** -- set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** -- Double-check you added `/events` to the end of your URL. -- Set **Secret** to the Webhook Secret you generated previously - - **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret. -- Under **Pull Request**, select: Opened, Source branch updated, Merged, Declined, Deleted and Comment added -- Click **Save**<img src="../guide/images/bitbucket-server-webhook.png" alt="Bitbucket Webhook" style="max-height: 600px;"> -- See [Next Steps](#next-steps) + +* Go to your repo's home page +* Click **Settings** in the sidebar +* Click **Webhooks** under the **WORKFLOW** section +* Click **Create webhook** +* Enter "Atlantis" for **Name** +* set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** +* Double-check you added `/events` to the end of your URL. 
+* Set **Secret** to the Webhook Secret you generated previously
+  * **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret.
+* Under **Pull Request**, select: Opened, Source branch updated, Merged, Declined, Deleted and Comment added
+* Click **Save**<img src="../guide/images/bitbucket-server-webhook.png" alt="Bitbucket Webhook" style="max-height: 600px;">
+* See [Next Steps](#next-steps)

## Azure DevOps
+
Webhooks are installed at the [team project](https://docs.microsoft.com/en-us/azure/devops/organizations/projects/about-projects?view=azure-devops) level, but may be restricted to only fire based on events pertaining to [specific repos](https://docs.microsoft.com/en-us/azure/devops/service-hooks/services/webhooks?view=azure-devops) within the team project.

-- Navigate anywhere within a team project, ie: `https://dev.azure.com/orgName/projectName/_git/repoName`
-- Select **Project settings** in the lower-left corner
-- Select **Service hooks**
-  - If you see the message "You do not have sufficient permissions to view or configure subscriptions." you need to ensure your user is a member of either the organization's "Project Collection Administrators" group or the project's "Project Administrators" group.
-  - To add your user to the Project Collection Build Administrators group, navigate to the organization level, click **Organization Settings** and then click **Permissions**. You should be at `https://dev.azure.com/<organization>/_settings/groups`. Now click on the **\<organization\>/Project Collection Administrators** group and add your user as a member.
-  - To add your user to the Project Administrators group, navigate to the project level, click **Project Settings** and then click **Permissions**. You should be at `https://dev.azure.com/<organization>/<project>/_settings/permissions`. Now click on the **\<project\>/Project Administrators** group and add your user as a member.
-- Click **Create subscription** or the green plus icon to add a new webhook
-- Scroll to the bottom of the list and select **Web Hooks**
-- Click **Next**
-- Under "Trigger on this type of event", select **Pull request created**
-  - Optionally, select a repository under **Filters** to restrict the scope of this webhook subscription to a specific repository
-- Click **Next**
-- Set **URL** to `http://$URL/events` where `$URL` is where Atlantis is hosted. Note that SSL, or `https://$URL/events`, is required if you set a Basic username and password for the webhook). **Be sure to add `/events`**
-- It is strongly recommended to set a Basic Username and Password for all webhooks
-- Leave all three drop-down menus for `...to send` set to **All**
-- Resource version should be set to **1.0** for `Pull request created` and `Pull request updated` event types and **2.0** for `Pull request commented on`
-- **NOTE** If you're adding a webhook to multiple team projects or repositories (using filters), each repository will need to use the **same** basic username and password.
-- Click **Finish**

+* Navigate anywhere within a team project, e.g. `https://dev.azure.com/orgName/projectName/_git/repoName`
+* Select **Project settings** in the lower-left corner
+* Select **Service hooks**
+  * If you see the message "You do not have sufficient permissions to view or configure subscriptions." you need to ensure your user is a member of either the organization's "Project Collection Administrators" group or the project's "Project Administrators" group.
+  * To add your user to the Project Collection Build Administrators group, navigate to the organization level, click **Organization Settings** and then click **Permissions**. You should be at `https://dev.azure.com/<organization>/_settings/groups`. Now click on the **\<organization\>/Project Collection Administrators** group and add your user as a member.
+  * To add your user to the Project Administrators group, navigate to the project level, click **Project Settings** and then click **Permissions**. You should be at `https://dev.azure.com/<organization>/<project>/_settings/permissions`. Now click on the **\<project\>/Project Administrators** group and add your user as a member.
+* Click **Create subscription** or the green plus icon to add a new webhook
+* Scroll to the bottom of the list and select **Web Hooks**
+* Click **Next**
+* Under "Trigger on this type of event", select **Pull request created**
+  * Optionally, select a repository under **Filters** to restrict the scope of this webhook subscription to a specific repository
+* Click **Next**
+* Set **URL** to `http://$URL/events` where `$URL` is where Atlantis is hosted. Note that SSL, or `https://$URL/events`, is required if you set a Basic username and password for the webhook. **Be sure to add `/events`**
+* It is strongly recommended to set a Basic Username and Password for all webhooks
+* Leave all three drop-down menus for `...to send` set to **All**
+* Resource version should be set to **1.0** for `Pull request created` and `Pull request updated` event types and **2.0** for `Pull request commented on`
+* **NOTE** If you're adding a webhook to multiple team projects or repositories (using filters), each repository will need to use the **same** basic username and password.
+* Click **Finish**

Repeat the process above until you have webhook subscriptions for the following event types that will trigger
on all repositories Atlantis will manage:

-- Pull request created (you just added this one)
-- Pull request updated
-- Pull request commented on
+* Pull request created (you just added this one)
+* Pull request updated
+* Pull request commented on

-- See [Next Steps](#next-steps)
+* See [Next Steps](#next-steps)

## Next Steps
+
* To verify that Atlantis is receiving your webhooks, create a test pull request to your repo.
* You should see the request show up in the Atlantis logs at an `INFO` level.
* You'll now need to configure Atlantis to add your [Provider Credentials](provider-credentials.md)
diff --git a/runatlantis.io/docs/custom-policy-checks.md b/runatlantis.io/docs/custom-policy-checks.md
index 9c6d362803..4c353335c7 100644
--- a/runatlantis.io/docs/custom-policy-checks.md
+++ b/runatlantis.io/docs/custom-policy-checks.md
@@ -1,9 +1,11 @@
 # Custom Policy Checks
-If you want to run custom policy tools or scripts instead of the built-in Conftest integration, you can do so by setting the `custom_policy_check` option and running it in a custom workflow. Note: custom policy tool output is simply parsed for "fail" substrings to determine if the policy set passed.
-This option can be configured either at the server-level in a [repos.yaml config file](server-configuration.md) or at the repo-level in an [atlantis.yaml file.](repo-level-atlantis-yaml.md).
+
+If you want to run custom policy tools or scripts instead of the built-in Conftest integration, you can do so by setting the `custom_policy_check` option and running it in a custom workflow.
Note: custom policy tool output is simply parsed for "fail" substrings to determine if the policy set passed.
+
+This option can be configured either at the server level in a [repos.yaml config file](server-configuration.md) or at the repo level in an [atlantis.yaml file](repo-level-atlantis-yaml.md).

## Server-side config example
+
Set the `policy_check` and `custom_policy_check` options to true, and run the custom tool in the policy check steps as seen below.

```yaml
@@ -30,8 +32,8 @@ policies:
    source: local
```

-
## Repo-level atlantis.yaml example
+
First, you will need to ensure `custom_policy_check` is within the `allowed_overrides` field of the server-side config. Next, just set the custom option to true on the specific project you want as shown in the example `atlantis.yaml` below:

```yaml
diff --git a/runatlantis.io/docs/custom-workflows.md b/runatlantis.io/docs/custom-workflows.md
index 3d8da5a566..af655abf26 100644
--- a/runatlantis.io/docs/custom-workflows.md
+++ b/runatlantis.io/docs/custom-workflows.md
@@ -3,23 +3,25 @@
 Custom workflows can be defined to override the default commands that Atlantis
 runs.

-[[toc]]
-
 ## Usage
+
 Custom workflows can be specified in the Server-Side Repo Config or in the Repo-Level
 `atlantis.yaml` files.

-**Notes**
+**Notes:**
+
 * If you want to allow repos to select their own workflows, they must have the
-`allowed_overrides: [workflow]` setting. See [server-side repo config use cases](server-side-repo-config.html#allow-repos-to-choose-a-server-side-workflow) for more details.
+`allowed_overrides: [workflow]` setting. See [server-side repo config use cases](server-side-repo-config.md#allow-repos-to-choose-a-server-side-workflow) for more details.
 * If in addition you also want to allow repos to define their own workflows, they must have the
-`allow_custom_workflows: true` setting. See [server-side repo config use cases](server-side-repo-config.html#allow-repos-to-define-their-own-workflows) for more details.
-
+`allow_custom_workflows: true` setting. See [server-side repo config use cases](server-side-repo-config.md#allow-repos-to-define-their-own-workflows) for more details.

 ## Use Cases
+
 ### .tfvars files
+
 Given the structure:
-
+
+```plain
 .
 └── project1
     ├── main.tf
@@ -29,6 +31,7 @@ Given the structure:

 If you wanted Atlantis to automatically run plan with `-var-file staging.tfvars` and `-var-file production.tfvars`
 you could define two workflows:
+
 ```yaml
 # repos.yaml or atlantis.yaml
 workflows:
@@ -40,7 +43,7 @@ workflows:
           extra_args: ["-var-file", "staging.tfvars"]
     # NOTE: no need to define the apply stage because it will default
     # to the normal apply stage.
-
+
   production:
     plan:
       steps:
@@ -62,7 +65,9 @@ workflows:
       - state_rm:
           extra_args: ["-lock=false"]
 ```
+
 Then in your repo-level `atlantis.yaml` file, you would reference the workflows:
+
 ```yaml
 # atlantis.yaml
 version: 3
@@ -80,20 +85,27 @@ workflows:
 # If you didn't define the workflows in your server-side repos.yaml config,
 # you would define them here instead.
 ```
+
 When you want to apply the plans, you can comment
-```
+
+```shell
 atlantis apply -p project1-staging
 ```
+
 and
-```
+
+```shell
 atlantis apply -p project1-production
 ```
+
 Where `-p` refers to the project name.
### Adding extra arguments to Terraform commands
+
If you need to append flags to `terraform plan` or `apply` temporarily, you can
append them to a comment following `--`, for example commenting:
-```
+
+```shell
atlantis plan -- -lock=false
```

@@ -117,7 +129,7 @@ workflows:
           extra_args: ["-lock=false"]
 ```

-If [policy checking](/docs/policy-checking.html#how-it-works) is enabled, `extra_args` can also be used to change the default behaviour of conftest.
+If [policy checking](policy-checking.md#how-it-works) is enabled, `extra_args` can also be used to change the default behaviour of conftest.

 ```yaml
 workflows:
@@ -130,6 +142,7 @@ workflows:
 ```

 ### Custom init/plan/apply Commands
+
 If you want to customize `terraform init`, `plan` or `apply` in ways that
 aren't supported by `extra_args`, you can completely override those commands.

@@ -147,11 +160,11 @@ workflows:
         - run:
             command: terraform init -input=false
             output: hide
-    
+
         # If you're using workspaces you need to select the workspace using the
         # $WORKSPACE environment variable.
         - run: terraform workspace select $WORKSPACE
-    
+
         # You MUST output the plan using -out $PLANFILE because Atlantis expects
         # plans to be in a specific location.
         - run: terraform plan -input=false -refresh -out $PLANFILE
@@ -162,14 +175,15 @@ workflows:
 ```

 ### CDKTF
+
 Here are the requirements to enable [CDKTF](https://developer.hashicorp.com/terraform/cdktf)

-- A custom image with `CDKTF` installed
-- Add `**/cdk.tf.json` to the list of Atlantis autoplan files.
-- Set the `atlantis-include-git-untracked-files` flag so that the Terraform files dynamically generated
+* A custom image with `CDKTF` installed
+* Add `**/cdk.tf.json` to the list of Atlantis autoplan files.
+* Set the `atlantis-include-git-untracked-files` flag so that the Terraform files dynamically generated
by CDKTF will be added to the Atlantis modified file list.
-- Use `pre_workflow_hooks` to run `cdktf synth`
-- Optional: There isn't a requirement to use a repo `atlantis.yaml` but one can be leveraged if needed.
+* Use `pre_workflow_hooks` to run `cdktf synth`
+* Optional: There isn't a requirement to use a repo `atlantis.yaml` but one can be leveraged if needed.

 #### Custom Image

@@ -192,6 +206,7 @@ ATLANTIS_INCLUDE_GIT_UNTRACKED_FILES=true

 OR
 `atlantis server --config config.yaml`
+
 ```yaml
 # config.yaml
 autoplan-file-list: "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/cdk.tf.json"
@@ -203,6 +218,7 @@ include-git-untracked-files: true

 Use `pre_workflow_hooks`
 `atlantis server --repo-config="repos.yaml"`
+
 ```yaml
 # repos.yaml
 repos:
@@ -234,7 +250,7 @@ $ tree --gitignore

 1. Container orchestrator (k8s/fargate/ecs/etc) uses the custom Docker image of Atlantis with `cdktf` installed
 with the `--autoplan-file-list` to trigger on `cdk.tf.json` files and `--include-git-untracked-files` set to include the
-CDKTF dynamically generated Terraform files in the Atlantis plan. 
+CDKTF dynamically generated Terraform files in the Atlantis plan.
 1. PR branch is pushed up containing `cdktf` code changes.
 1. Atlantis checks out the branch in the repo.
 1. Atlantis runs the `npm i && cdktf get && cdktf synth` command in the repo root as a step in `pre_workflow_hooks`,
@@ -243,6 +259,7 @@ generating the `cdk.tf.json` Terraform files.
 1. Atlantis then runs `terraform` workflows in the respective directories as usual.

 ### Terragrunt
+
 Atlantis supports running custom commands in place of the default Atlantis
 commands. We can use this functionality to enable
 [Terragrunt](https://github.com/gruntwork-io/terragrunt). 
@@ -250,7 +267,8 @@ commands. We can use this functionality to enable
 You can either use your repo's `atlantis.yaml` file or the Atlantis server's `repos.yaml` file.

 Given a directory structure:
-```
+
+```plain
 .
 └── live
    ├── prod
@@ -315,6 +333,7 @@ workflows:
 ```

 If using the repo's `atlantis.yaml` file you would use the following config:
+
 ```yaml
 version: 3
 projects:
@@ -350,10 +369,9 @@ workflows:

 **NOTE:** If using the repo's `atlantis.yaml` file, you will need to specify each directory that is a Terragrunt
 project.
-
 ::: warning
 Atlantis will need to have the `terragrunt` binary in its PATH.
-If you're using Docker you can build your own image, see [Customization](/docs/deployment.html#customization).
+If you're using Docker you can build your own image, see [Customization](deployment.md#customization).
 :::

 If you don't want to create/manage the repo's `atlantis.yaml` file yourself, you can use the tool [terragrunt-atlantis-config](https://github.com/transcend-io/terragrunt-atlantis-config) to generate it.
@@ -361,6 +379,7 @@ If you don't want to create/manage the repo's `atlantis.yaml` file yourself, you
 The `terragrunt-atlantis-config` tool is a community project and not maintained by the Atlantis team.

 ### Running custom commands
+
 Atlantis supports running completely custom commands. In this example, we want to run
 a script after every `apply`:

@@ -375,17 +394,19 @@ workflows:
 ```

 ::: tip Notes
+
 * We don't need to write a `plan` key under `myworkflow`. If `plan`
 isn't set, Atlantis will use the default plan workflow which is what we want in this case.
 * A custom command will only terminate if all output file descriptors are closed.
 Therefore a custom command can only be sent to the background (e.g. for an SSH tunnel during the terraform run)
 when its output is redirected to a different location. For example, Atlantis
-will execute a custom script containing the following code to create a SSH tunnel correctly: 
+will execute a custom script containing the following code to create an SSH tunnel correctly:
 `ssh -f -M -S /tmp/ssh_tunnel -L 3306:database:3306 -N bastion 1>/dev/null 2>&1`. Without the redirect, the script would block the Atlantis workflow.
 :::

 ### Custom Backend Config
+
 If you need to specify the `-backend-config` flag to `terraform init` you'll need to use a custom workflow.
 In this example, we're using custom backend files to configure two remote states, one for each environment.
 We're then using `.tfvars` files to load different variables for each environment.

@@ -410,12 +431,14 @@ workflows:
       - plan:
           extra_args: [-var-file=production.tfvars]
 ```
+
 ::: warning NOTE
 We have to use a custom `run` step to `rm -rf .terraform` because otherwise
 Terraform will complain in-between commands since the backend config has changed.
 :::

 You would then reference the workflows in your repo-level `atlantis.yaml`:
+
 ```yaml
 version: 3
 projects:
@@ -427,8 +450,69 @@ projects:
     workflow: production
 ```

+### Add directory and repo context for AWS resources using default tags
+
+This is only available in AWS provider version [5.62.0](https://github.com/hashicorp/terraform-provider-aws/releases/tag/v5.62.0) and higher.
+
+This configuration will create the following tags:
+
+* `repository` equal to `github.com/<owner>/<repo>` which can be changed for GitLab or other VCSs
+* `repository_dir` equal to the relative directory
+
+Other default variables can be added, such as for the workspace. See below for more available environment variables. 
+
+```yaml
+workflows:
+  terraform:
+    plan:
+      steps:
+        # These env vars TF_AWS_DEFAULT_TAGS_ will work for aws provider 5.62.0+
+        # https://github.com/hashicorp/terraform-provider-aws/releases/tag/v5.62.0
+        - &env_default_tags_repository
+          env:
+            name: TF_AWS_DEFAULT_TAGS_repository
+            command: 'echo "github.com/${BASE_REPO_OWNER}/${BASE_REPO_NAME}"'
+        - &env_default_tags_repository_dir
+          env:
+            name: TF_AWS_DEFAULT_TAGS_repository_dir
+            command: 'echo "${REPO_REL_DIR}"'
+    apply:
+      steps:
+        - *env_default_tags_repository
+        - *env_default_tags_repository_dir
+```
+
+NOTE:
+
+* Appending tags to every resource may regenerate data sources such as `aws_iam_policy_document` which will cause many resources to be modified. See known issue in aws provider [#29421](https://github.com/hashicorp/terraform-provider-aws/issues/29421).
+
+* To run a local plan outside of Atlantis, the same environment variables will need to be created. Note that `tfvars` must be run before the `TF_AWS_DEFAULT_TAGS_*` variables are exported, so that the values it sets are available.
+
+  ```bash
+  tfvars () {
+    export terraform_repository=$(git config --get remote.origin.url | sed 's,^git@,,g' | tr ':' '/' | sed 's,.git$,,g')
+    export terraform_repository_dir=$(git rev-parse --show-prefix | sed 's,\/$,,g')
+  }
+  tfvars
+  export TF_AWS_DEFAULT_TAGS_repository=$terraform_repository
+  export TF_AWS_DEFAULT_TAGS_repository_dir=$terraform_repository_dir
+  terraform plan
+  ```
+
+  If a colon is used in the tag name, use the `env` command instead of `export`.
+
+  ```bash
+  tfvars
+  env \
+    TF_AWS_DEFAULT_TAGS_org:repository=$terraform_repository \
+    TF_AWS_DEFAULT_TAGS_org:repository_dir=$terraform_repository_dir \
+    terraform plan
+  ```
+
 ## Reference
+
 ### Workflow
+
 ```yaml
 plan:
 apply:
@@ -444,6 +528,7 @@ state_rm:
 | state_rm | [Stage](#stage) | `steps: [init, state_rm]` | no | How to run state rm for this project. |

 ### Stage
+
 ```yaml
 steps:
 - run: custom-command
@@ -457,8 +542,11 @@ steps:
 | steps | array[[Step](#step)] | `[]` | no | List of steps for this stage. If the steps key is empty, no steps will be run for this stage. |

 ### Step
+
 #### Built-In Commands
+
 Steps can be a single string for a built-in command.
+
 ```yaml
 - init
 - plan
@@ -466,12 +554,15 @@ Steps can be a single string for a built-in command.
 - import
 - state_rm
 ```
+
 | Key | Type | Default | Required | Description |
 |---------------------------------|--------|---------|----------|------------------------------------------------------------------------------------------------------------------------------|
 | init/plan/apply/import/state_rm | string | none | no | Use a built-in command without additional configuration. Only `init`, `plan`, `apply`, `import` and `state_rm` are supported |

 #### Built-In Command With Extra Args
+
 A map from string to `extra_args` for a built-in command with extra arguments.
+
 ```yaml
 - init:
     extra_args: [arg1, arg2]
@@ -484,79 +575,94 @@ A map from string to `extra_args` for a built-in command with extra arguments.
 - state_rm:
     extra_args: [arg1, arg2]
 ```
+
 | Key | Type | Default | Required | Description |
 |---------------------------------|------------------------------------|---------|----------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| init/plan/apply/import/state_rm | map[`extra_args` -> array[string]] | none | no | Use a built-in command and append `extra_args`. 
Only `init`, `plan`, `apply`, `import` and `state_rm` are supported as keys and only `extra_args` is supported as a value |
+| init/plan/apply/import/state_rm | map\[`extra_args` -> array\[string\]\] | none | no | Use a built-in command and append `extra_args`. Only `init`, `plan`, `apply`, `import` and `state_rm` are supported as keys and only `extra_args` is supported as a value |

 #### Custom `run` Command
+
 A custom command can be written in two ways:

 Compact:
+
 ```yaml
 - run: custom-command arg1 arg2
 ```
+
 | Key | Type | Default | Required | Description |
 |-----|--------|---------|----------|----------------------|
 | run | string | none | no | Run a custom command |

 Full:
+
 ```yaml
-- run: 
+- run:
     command: custom-command arg1 arg2
+    shell: sh
+    shellArgs:
+      - "--debug"
+      - "-c"
     output: show
 ```
+
 | Key | Type | Default | Required | Description |
 |-----|--------------------------------------------------------------|---------|----------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| run | map[string -> string] | none | no | Run a custom command |
+| run | map\[string -> string\] | none | no | Run a custom command |
 | run.command | string | none | yes | Shell command to run |
-| run.output | string | "show" | no | How to post-process the output of this command when posted in the PR comment. The options are<br/>* `show` - preserve the full output<br/>* `hide` - hide output from comment (still visible in the real-time streaming output)<br/> * `strip_refreshing` - hide all output up until and including the last line containing "Refreshing...". This matches the behavior of the built-in `plan` command |
+| run.shell | string | "sh" | no | Name of the shell to use for command execution |
+| run.shellArgs | string or []string | "-c" | no | Command line arguments to be passed to the shell. Cannot be set without `shell` |
+| run.output | string | "show" | no | How to post-process the output of this command when posted in the PR comment. The options are<br/>* `show` - preserve the full output<br/>* `hide` - hide output from comment (still visible in the real-time streaming output)<br/> * `strip_refreshing` - hide all output up until and including the last line containing "Refreshing...". This matches the behavior of the built-in `plan` command |

-::: tip Notes
-* `run` steps in the main `workflow` are executed with the following environment variables:
+#### Native Environment Variables
+
+* `run` steps in the main `workflow` are executed with the following environment variables:
  Note: these variables are not available to `pre` or `post` workflow hooks.
-  * `WORKSPACE` - The Terraform workspace used for this project, ex. `default`.
+  * `WORKSPACE` - The Terraform workspace used for this project, ex. `default`.
    NOTE: if the step is executed before `init` then Atlantis won't have switched to this workspace yet.
-  * `ATLANTIS_TERRAFORM_VERSION` - The version of Terraform used for this project, ex. `0.11.0`.
-  * `DIR` - Absolute path to the current directory.
-  * `PLANFILE` - Absolute path to the location where Atlantis expects the plan to
+  * `ATLANTIS_TERRAFORM_VERSION` - The version of Terraform used for this project, ex. `0.11.0`. 
+  * `DIR` - Absolute path to the current directory.
+  * `PLANFILE` - Absolute path to the location where Atlantis expects the plan to
 either be generated (by plan) or already exist (if running apply). Can be used to override the built-in `plan`/`apply` commands, ex. `run: terraform plan -out $PLANFILE`.
-  * `SHOWFILE` - Absolute path to the location where Atlantis expects the plan in json format to
+  * `SHOWFILE` - Absolute path to the location where Atlantis expects the plan in json format to
 either be generated (by show) or already exist (if running policy checks). Can be used to override the built-in `plan`/`apply` commands, ex. `run: terraform show -json $PLANFILE > $SHOWFILE`.
-  * `POLICYCHECKFILE` - Absolute path to the location of policy check output if Atlantis runs policy checks.
-  See [policy checking](/docs/policy-checking.html#data-for-custom-run-steps) for information of data structure.
-  * `BASE_REPO_NAME` - Name of the repository that the pull request will be merged into, ex. `atlantis`.
-  * `BASE_REPO_OWNER` - Owner of the repository that the pull request will be merged into, ex. `runatlantis`.
-  * `HEAD_REPO_NAME` - Name of the repository that is getting merged into the base repository, ex. `atlantis`.
-  * `HEAD_REPO_OWNER` - Owner of the repository that is getting merged into the base repository, ex. `acme-corp`.
-  * `HEAD_BRANCH_NAME` - Name of the head branch of the pull request (the branch that is getting merged into the base)
-  * `HEAD_COMMIT` - The sha256 that points to the head of the branch that is being pull requested into the base. If the pull request is from Bitbucket Cloud the string will only be 12 characters long because Bitbucket Cloud truncates its commit IDs.
-  * `BASE_BRANCH_NAME` - Name of the base branch of the pull request (the branch that the pull request is getting merged into)
-  * `PROJECT_NAME` - Name of the project configured in `atlantis.yaml`. If no project name is configured this will be an empty string.
-  * `PULL_NUM` - Pull request number or ID, ex. `2`.
-  * `PULL_URL` - Pull request URL, ex. `https://github.com/runatlantis/atlantis/pull/2`.
-  * `PULL_AUTHOR` - Username of the pull request author, ex. `acme-user`.
-  * `REPO_REL_DIR` - The relative path of the project in the repository. For example if your project is in `dir1/dir2/` then this will be set to `"dir1/dir2"`. If your project is at the root this will be `"."`.
-  * `USER_NAME` - Username of the VCS user running command, ex. `acme-user`. During an autoplan, the user will be the Atlantis API user, ex. `atlantis`.
-  * `COMMENT_ARGS` - Any additional flags passed in the comment on the pull request. Flags are separated by commas and
+  * `POLICYCHECKFILE` - Absolute path to the location of policy check output if Atlantis runs policy checks.
+  See [policy checking](policy-checking.md#data-for-custom-run-steps) for information on the data structure.
+  * `BASE_REPO_NAME` - Name of the repository that the pull request will be merged into, ex. `atlantis`.
+  * `BASE_REPO_OWNER` - Owner of the repository that the pull request will be merged into, ex. `runatlantis`.
+  * `HEAD_REPO_NAME` - Name of the repository that is getting merged into the base repository, ex. `atlantis`.
+  * `HEAD_REPO_OWNER` - Owner of the repository that is getting merged into the base repository, ex. `acme-corp`. 
+  * `HEAD_BRANCH_NAME` - Name of the head branch of the pull request (the branch that is getting merged into the base)
+  * `HEAD_COMMIT` - The sha256 that points to the head of the branch that is being pull requested into the base. If the pull request is from Bitbucket Cloud the string will only be 12 characters long because Bitbucket Cloud truncates its commit IDs.
+  * `BASE_BRANCH_NAME` - Name of the base branch of the pull request (the branch that the pull request is getting merged into)
+  * `PROJECT_NAME` - Name of the project configured in `atlantis.yaml`. If no project name is configured this will be an empty string.
+  * `PULL_NUM` - Pull request number or ID, ex. `2`.
+  * `PULL_URL` - Pull request URL, ex. `https://github.com/runatlantis/atlantis/pull/2`.
+  * `PULL_AUTHOR` - Username of the pull request author, ex. `acme-user`.
+  * `REPO_REL_DIR` - The relative path of the project in the repository. For example if your project is in `dir1/dir2/` then this will be set to `"dir1/dir2"`. If your project is at the root this will be `"."`.
+  * `USER_NAME` - Username of the VCS user running command, ex. `acme-user`. During an autoplan, the user will be the Atlantis API user, ex. `atlantis`.
+  * `COMMENT_ARGS` - Any additional flags passed in the comment on the pull request. Flags are separated by commas and
 every character is escaped, ex. `atlantis plan -- arg1 arg2` will result in `COMMENT_ARGS=\a\r\g\1,\a\r\g\2`.
 * A custom command will only terminate if all output file descriptors are closed.
 Therefore a custom command can only be sent to the background (e.g. for an SSH tunnel during the terraform run)
 when its output is redirected to a different location. For example, Atlantis
-will execute a custom script containing the following code to create a SSH tunnel correctly: 
+will execute a custom script containing the following code to create an SSH tunnel correctly:
 `ssh -f -M -S /tmp/ssh_tunnel -L 3306:database:3306 -N bastion 1>/dev/null 2>&1`. Without the redirect, the script would block the Atlantis workflow.
-* If a workflow step returns a non-zero exit code, the workflow will stop. 
+* If a workflow step returns a non-zero exit code, the workflow will stop.
 :::

 #### Environment Variable `env` Command
+
 The `env` command allows you to set environment variables that will be available
 to all steps defined **below** the `env` step.

 You can set hard-coded values via the `value` key, or set dynamic values via the
 `command` key which allows you to run any command and uses the output
 as the environment variable value.
+
 ```yaml
 - env:
     name: ENV_NAME
@@ -564,35 +670,72 @@ as the environment variable value.
 - env:
     name: ENV_NAME_2
     command: 'echo "dynamic-value-$(date)"'
+- env:
+    name: ENV_NAME_3
+    command: echo ${DIR%$REPO_REL_DIR}
+    shell: bash
+    shellArgs:
+      - "--verbose"
+      - "-c"
 ```
+
 | Key | Type | Default | Required | Description |
 |-----------------|-----------------------|---------|----------|-----------------------------------------------------------------------------------------------------------------|
-| env | map[string -> string] | none | no | Set environment variables for subsequent steps |
+| env | map\[string -> string\] | none | no | Set environment variables for subsequent steps |
 | env.name | string | none | yes | Name of the environment variable |
 | env.value | string | none | no | Set the value of the environment variable to a hard-coded string. 
Cannot be set at the same time as `command` |
 | env.command | string | none | no | Set the value of the environment variable to the output of a command. Cannot be set at the same time as `value` |
+| env.shell | string | "sh" | no | Name of the shell to use for command execution. Cannot be set without `command` |
+| env.shellArgs | string or []string | "-c" | no | Command line arguments to be passed to the shell. Cannot be set without `shell` |

 ::: tip Notes
+
 * `env` `command`'s can use any of the built-in environment variables available
-  to `run` commands.
+  to `run` commands.
 :::

 #### Multiple Environment Variables `multienv` Command
+
 The `multienv` command allows you to set a dynamic number of environment variables that will be available
 to all steps defined **below** the `multienv` step.
+
+Compact:
+
 ```yaml
 - multienv: custom-command
 ```
-| Key | Type | Default | Required | Description |
-|----------|--------|---------|----------|--------------------------------------------------------------------------------|
-| multienv | string | none | no | Run a custom command and add set environment variables according to the result |

-The result of the executed command must have a fixed format:
-EnvVar1Name=value1,EnvVar2Name=value2,EnvVar3Name=value3
+| Key | Type | Default | Required | Description |
+|----------|--------|---------|----------|------------------------------------------------------------|
+| multienv | string | none | no | Run a custom command and add the printed environment variables |

-The name-value pairs in the result are added as environment variables if success is true otherwise the workflow execution stops with error and the errorMessage is getting displayed.
+Full:
+
+```yaml
+- multienv:
+    command: custom-command
+    shell: bash
+    shellArgs:
+      - "--verbose"
+      - "-c"
+    output: show
+```
+
+| Key | Type | Default | Required | Description |
+|--------------------|-----------------------|---------|----------|-------------------------------------------------------------------------------------|
+| multienv | map[string -> string] | none | no | Run a custom command and add the printed environment variables |
+| multienv.command | string | none | yes | Name of the custom script to run |
+| multienv.shell | string | "sh" | no | Name of the shell to use for command execution |
+| multienv.shellArgs | string or []string | "-c" | no | Command line arguments to be passed to the shell. Cannot be set without `shell` |
+| multienv.output | string | "show" | no | Setting output to "hide" will suppress the message about added environment variables |
+
+The output of the command execution must have the following format:
+`EnvVar1Name=value1,EnvVar2Name=value2,EnvVar3Name=value3`
+
+The name-value pairs in the output are added as environment variables if command execution is successful, otherwise the workflow execution is interrupted with an error and the error message is displayed.

 ::: tip Notes
+
 * `multienv` `command`'s can use any of the built-in environment variables available
-  to `run` commands.
+  to `run` commands.
 :::
diff --git a/runatlantis.io/docs/deployment.md b/runatlantis.io/docs/deployment.md
index ca1ba1bb72..5df948bc64 100644
--- a/runatlantis.io/docs/deployment.md
+++ b/runatlantis.io/docs/deployment.md
@@ -1,29 +1,33 @@
 # Deployment
+
 This page covers getting Atlantis up and running in your infrastructure. 
::: tip Prerequisites -* You have created [access credentials](access-credentials.html) for your Atlantis user -* You have created a [webhook secret](webhook-secrets.html) -::: -[[toc]] +* You have created [access credentials](access-credentials.md) for your Atlantis user +* You have created a [webhook secret](webhook-secrets.md) +::: ## Architecture Overview + ### Runtime + Atlantis is a simple [Go](https://golang.org/) app. It receives webhooks from your Git host and executes Terraform commands locally. There is an official Atlantis [Docker image](https://ghcr.io/runatlantis/atlantis). ### Routing + Atlantis and your Git host need to be able to route and communicate with one another. Your Git host needs to be able to send webhooks to Atlantis and Atlantis needs to be able to make API calls to your Git host. If you're using -a public Git host like github.com, gitlab.com, bitbucket.org, or dev.azure.com then you'll need to +a public Git host like github.com, gitlab.com, gitea.com, bitbucket.org, or dev.azure.com then you'll need to expose Atlantis to the internet. -If you're using a private Git host like GitHub Enterprise, GitLab Enterprise or +If you're using a private Git host like GitHub Enterprise, GitLab Enterprise, self-hosted Gitea or Bitbucket Server, then Atlantis needs to be routable from the private host and Atlantis will need to be able to route to the private host. ### Data + Atlantis has no external database. Atlantis stores Terraform plan files on disk. If Atlantis loses that data in between a `plan` and `apply` cycle, then users will have to re-run `plan`. Because of this, you may want to provision a persistent disk @@ -32,6 +36,7 @@ for Atlantis. ## Deployment Pick your deployment type: + * [Kubernetes Helm Chart](#kubernetes-helm-chart) * [Kubernetes Manifests](#kubernetes-manifests) * [Kubernetes Kustomize](#kubernetes-kustomize) @@ -41,21 +46,27 @@ Pick your deployment type: * [Docker](#docker) * [Roll Your Own](#roll-your-own) - ### Kubernetes Helm Chart + Atlantis has an [official Helm chart](https://github.com/runatlantis/helm-charts/tree/main/charts/atlantis) To install: + 1. Add the runatlantis helm chart repository to helm + ```bash helm repo add runatlantis https://runatlantis.github.io/helm-charts ``` + 1. `cd` into a directory where you're going to configure your Atlantis Helm chart 1. Create a `values.yaml` file by running + ```bash helm inspect values runatlantis/atlantis > values.yaml ``` + 1. Edit `values.yaml` and add your access credentials and webhook secret + ```yaml # for example github: @@ -63,28 +74,33 @@ To install: token: bar secret: baz ``` + 1. Edit `values.yaml` and set your `orgAllowlist` (see [Repo Allowlist](server-configuration.md#repo-allowlist) for more information) + ```yaml orgAllowlist: github.com/runatlantis/* ``` - **Note**: For helm chart version < `4.0.2`, `orgWhitelist` must be used instead. -1. Configure any other variables (see [https://github.com/runatlantis/helm-charts#customization](https://github.com/runatlantis/helm-charts#customization) + + **Note**: For helm chart version < `4.0.2`, `orgWhitelist` must be used instead. +1. Configure any other variables (see [Atlantis Helm Chart: Customization](https://github.com/runatlantis/helm-charts#customization) for documentation) 1. Run + ```sh helm install atlantis runatlantis/atlantis -f values.yaml ``` If you are using helm v2, run: + ```sh helm install -f values.yaml runatlantis/atlantis ``` - Atlantis should be up and running in minutes! 
See [Next Steps](#next-steps) for what to do next. ### Kubernetes Manifests + If you'd like to use a raw Kubernetes manifest, we offer either a [Deployment](https://kubernetes.io/docs/concepts/workloads/controllers/deployment/) or a [Statefulset](https://kubernetes.io/docs/concepts/workloads/controllers/statefulset/) with persistent storage. @@ -94,6 +110,7 @@ or you upgrade Atlantis, you won't lose plans that haven't been applied. If you do lose that data, you just need to run `atlantis plan` again so it's not the end of the world. Regardless of whether you choose a Deployment or StatefulSet, first create a Secret with the webhook secret and access token: + ```bash echo -n "yourtoken" > token echo -n "yoursecret" > webhook-secret @@ -101,25 +118,30 @@ kubectl create secret generic atlantis-vcs --from-file=token --from-file=webhook ``` Next, edit the manifests below as follows: -1. Replace `<VERSION>` in `image: ghcr.io/runatlantis/atlantis:<VERSION>` with the most recent version from [https://github.com/runatlantis/atlantis/releases/latest](https://github.com/runatlantis/atlantis/releases/latest). + +1. Replace `<VERSION>` in `image: ghcr.io/runatlantis/atlantis:<VERSION>` with the most recent version from [GitHub: Atlantis latest release](https://github.com/runatlantis/atlantis/releases/latest). * NOTE: You never want to run with `:latest` because if your Pod moves to a new node, Kubernetes will pull the latest image and you might end up upgrading Atlantis by accident! -1. Replace `value: github.com/yourorg/*` under `name: ATLANTIS_REPO_ALLOWLIST` with the allowlist pattern -for your Terraform repos. See [Repo Allowlist](server-configuration.html#repo-allowlist) for more details. -1. If you're using GitHub: +2. Replace `value: github.com/yourorg/*` under `name: ATLANTIS_REPO_ALLOWLIST` with the allowlist pattern +for your Terraform repos. See [--repo-allowlist](server-configuration.md#repo-allowlist) for more details. +3. If you're using GitHub: 1. Replace `<YOUR_GITHUB_USER>` with the username of your Atlantis GitHub user without the `@`. - 2. Delete all the `ATLANTIS_GITLAB_*`, `ATLANTIS_BITBUCKET_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables. -2. If you're using GitLab: + 2. Delete all the `ATLANTIS_GITLAB_*`, `ATLANTIS_GITEA_*`, `ATLANTIS_BITBUCKET_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables. +4. If you're using GitLab: 1. Replace `<YOUR_GITLAB_USER>` with the username of your Atlantis GitLab user without the `@`. - 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_BITBUCKET_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables. -3. If you're using Bitbucket: + 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITEA_*`, `ATLANTIS_BITBUCKET_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables. +5. If you're using Gitea: + 1. Replace `<YOUR_GITEA_USER>` with the username of your Atlantis Gitea user without the `@`. + 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITLAB_*`, `ATLANTIS_BITBUCKET_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables. +6. If you're using Bitbucket: 1. Replace `<YOUR_BITBUCKET_USER>` with the username of your Atlantis Bitbucket user without the `@`. - 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITLAB_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables. -4. If you're using Azure DevOps: + 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITLAB_*`, `ATLANTIS_GITEA_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables. +7. If you're using Azure DevOps: 1. 
Replace `<YOUR_AZUREDEVOPS_USER>` with the username of your Atlantis Azure DevOps user without the `@`. - 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITLAB_*`, and `ATLANTIS_BITBUCKET_*` environment variables. + 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITLAB_*`, `ATLANTIS_GITEA_*`, and `ATLANTIS_BITBUCKET_*` environment variables. #### StatefulSet Manifest + <details> <summary>Show...</summary> @@ -182,6 +204,21 @@ spec: key: webhook-secret ### End GitLab Config ### + ### Gitea Config ### + - name: ATLANTIS_GITEA_USER + value: <YOUR_GITEA_USER> # 4i. If you're using Gitea replace <YOUR_GITEA_USER> with the username of your Atlantis Gitea user without the `@`. + - name: ATLANTIS_GITEA_TOKEN + valueFrom: + secretKeyRef: + name: atlantis-vcs + key: token + - name: ATLANTIS_GITEA_WEBHOOK_SECRET + valueFrom: + secretKeyRef: + name: atlantis-vcs + key: webhook-secret + ### End Gitea Config ### + ### Bitbucket Config ### - name: ATLANTIS_BITBUCKET_USER value: <YOUR_BITBUCKET_USER> # 5i. If you're using Bitbucket replace <YOUR_BITBUCKET_USER> with the username of your Atlantis Bitbucket user without the `@`. @@ -274,10 +311,11 @@ spec: selector: app.kubernetes.io/name: atlantis ``` -</details> +</details> #### Deployment Manifest + <details> <summary>Show...</summary> @@ -335,6 +373,21 @@ spec: key: webhook-secret ### End GitLab Config ### + ### Gitea Config ### + - name: ATLANTIS_GITEA_USER + value: <YOUR_GITEA_USER> # 4i. If you're using Gitea replace <YOUR_GITEA_USER> with the username of your Atlantis Gitea user without the `@`. + - name: ATLANTIS_GITEA_TOKEN + valueFrom: + secretKeyRef: + name: atlantis-vcs + key: token + - name: ATLANTIS_GITEA_WEBHOOK_SECRET + valueFrom: + secretKeyRef: + name: atlantis-vcs + key: webhook-secret + ### End Gitea Config ### + ### Bitbucket Config ### - name: ATLANTIS_BITBUCKET_USER value: <YOUR_BITBUCKET_USER> # 5i. If you're using Bitbucket replace <YOUR_BITBUCKET_USER> with the username of your Atlantis Bitbucket user without the `@`. @@ -407,14 +460,16 @@ spec: selector: app.kubernetes.io/name: atlantis ``` + </details> #### Routing and SSL + The manifests above create a Kubernetes `Service` of `type: ClusterIP` which isn't accessible outside your cluster. Depending on how you're doing routing into Kubernetes, you may want to use a Service of `type: LoadBalancer` so that Atlantis is accessible to GitHub/GitLab and your internal users. -If you want to add SSL you can use something like [https://github.com/jetstack/cert-manager](https://github.com/jetstack/cert-manager) to generate SSL +If you want to add SSL you can use something like [cert-manager](https://github.com/cert-manager/cert-manager) to generate SSL certs and mount them into the Pod. Then set the `ATLANTIS_SSL_CERT_FILE` and `ATLANTIS_SSL_KEY_FILE` environment variables to enable SSL. You could also set up SSL at your LoadBalancer. @@ -427,6 +482,7 @@ A `kustomization.yaml` file is provided in the directory `kustomize/`, so you ma You will need to provide a secret (with the default name of `atlantis-vcs`) to configure Atlantis with access credentials for your remote repositories. Example: + ```yaml bases: - github.com/runatlantis/atlantis//kustomize @@ -452,7 +508,6 @@ patchesStrategicMerge: #### Required - ```yaml ... containers: @@ -483,6 +538,26 @@ containers: key: webhook-secret ``` +#### Gitea + +```yaml +containers: +- name: atlantis + env: + - name: ATLANTIS_GITEA_USER + value: <YOUR_GITEA_USER> # 4i. 
If you're using Gitea replace <YOUR_GITEA_USER> with the username of your Atlantis Gitea user without the `@`.
+    - name: ATLANTIS_GITEA_TOKEN
+      valueFrom:
+        secretKeyRef:
+          name: atlantis-vcs
+          key: token
+    - name: ATLANTIS_GITEA_WEBHOOK_SECRET
+      valueFrom:
+        secretKeyRef:
+          name: atlantis-vcs
+          key: webhook-secret
+```
+
 #### GitHub

 ```yaml
@@ -521,37 +596,44 @@ containers:
 ```

 ### OpenShift
+
 The Helm chart and Kubernetes manifests above are compatible with OpenShift; however, you need to run
 with an additional environment variable: `HOME=/home/atlantis`. This is required because
 OpenShift runs Docker images with random user IDs that use `/` as their home directory.

 ### AWS Fargate
+
 If you'd like to run Atlantis on [AWS Fargate](https://aws.amazon.com/fargate/) check out the Atlantis module on the [Terraform Module Registry](https://registry.terraform.io/modules/terraform-aws-modules/atlantis/aws/latest) and then check out the [Next Steps](#next-steps).

 ### Google Kubernetes Engine (GKE)
+
 You can run Atlantis on GKE using the [Helm chart](#kubernetes-helm-chart) or the [manifests](#kubernetes-manifests).

 There is also a set of full Terraform configurations that create a GKE Cluster,
-Cloud Storage Backend and TLS certs: [https://github.com/sethvargo/atlantis-on-gke](https://github.com/sethvargo/atlantis-on-gke).
+Cloud Storage Backend and TLS certs: [sethvargo atlantis-on-gke](https://github.com/sethvargo/atlantis-on-gke).

 Once you're done, see [Next Steps](#next-steps).

 ### Google Compute Engine (GCE)
-Atlantis can be run on Google Compute Engine using a Terraform module that deploys it as a Docker container on a managed Compute Engine instance.
-This [Terraform module](https://registry.terraform.io/modules/bschaatsbergen/atlantis/gce/latest) features the creation of a Cloud load balancer, a Container-Optimized OS-based VM, a persistent data disk, and a managed instance group.
+
+Atlantis can be run on Google Compute Engine using a Terraform module that deploys it as a Docker container on a managed Compute Engine instance.
+
+This [Terraform module](https://registry.terraform.io/modules/runatlantis/atlantis/gce/latest) features the creation of a Cloud load balancer, a Container-Optimized OS-based VM, a persistent data disk, and a managed instance group.

 After it is deployed, see [Next Steps](#next-steps).

 ### Docker
+
 Atlantis has an [official](https://ghcr.io/runatlantis/atlantis) Docker image: `ghcr.io/runatlantis/atlantis`.

 #### Customization
+
 If you need to modify the Docker image that we provide, for instance to add the terragrunt binary, you can do something like this:

 1. Create a custom Dockerfile
+
    ```dockerfile
    FROM ghcr.io/runatlantis/atlantis:{latest version}

@@ -565,32 +647,37 @@ Additionally, the /docker-entrypoint.d/ directory offers a flexible option for i

 **Important Notice**: There is a critical update regarding the data directory in Atlantis. In versions prior to 0.26.0, the directory was configured to be accessible by the root user. However, with the transition to the atlantis user in newer versions, it is imperative to update the directory permissions accordingly in your current deployment when upgrading to a version later than 0.26.0. This step ensures seamless access and functionality for the atlantis user.

 1. Build your Docker image
+
    ```bash
    docker build -t {YOUR_DOCKER_ORG}/atlantis-custom .
    ```

 1. 
Run your image
+
   ```bash
   docker run {YOUR_DOCKER_ORG}/atlantis-custom server --gh-user=GITHUB_USERNAME --gh-token=GITHUB_TOKEN
   ```

 ### Microsoft Azure

-The standard [Kubernetes Helm Chart](#kubernetes-helm-chart) should work fine on [Azure Kubernetes Service](https://docs.microsoft.com/en-us/azure/aks/intro-kubernetes). 
+The standard [Kubernetes Helm Chart](#kubernetes-helm-chart) should work fine on [Azure Kubernetes Service](https://docs.microsoft.com/en-us/azure/aks/intro-kubernetes).

-Another option is [Azure Container Instances](https://docs.microsoft.com/en-us/azure/container-instances/). See this community member's [repo](https://github.com/jplane/atlantis-on-aci) or the new and more up-to-date [Terraform module](https://github.com/getindata/terraform-azurerm-atlantis) for install scripts and more information on running Atlantis on ACI. 
+Another option is [Azure Container Instances](https://docs.microsoft.com/en-us/azure/container-instances/). See this community member's [repo](https://github.com/jplane/atlantis-on-aci) or the new and more up-to-date [Terraform module](https://github.com/getindata/terraform-azurerm-atlantis) for install scripts and more information on running Atlantis on ACI.

 **Note on ACI Deployment:** Due to a bug in earlier Docker releases, Docker v23.0.0 or later is required for straightforward deployment. Alternatively, the Atlantis Docker image can be pushed to a private registry such as ACR and then used.

 ### Roll Your Own
+
 If you want to roll your own Atlantis installation, you can get the `atlantis`
-binary from [https://github.com/runatlantis/atlantis/releases](https://github.com/runatlantis/atlantis/releases)
+binary from [GitHub](https://github.com/runatlantis/atlantis/releases)
 or use the [official Docker image](https://ghcr.io/runatlantis/atlantis).

 #### Startup Command
+
 The exact flags to `atlantis server` depend on your Git host:

 ##### GitHub
+
 ```bash
 atlantis server \
 --atlantis-url="$URL" \
@@ -601,6 +688,7 @@ atlantis server \
 ```

 ##### GitHub Enterprise
+
 ```bash
 HOSTNAME=YOUR_GITHUB_ENTERPRISE_HOSTNAME # ex. github.runatlantis.io
 atlantis server \
@@ -613,6 +701,7 @@ atlantis server \
 ```

 ##### GitLab
+
 ```bash
 atlantis server \
 --atlantis-url="$URL" \
@@ -623,6 +712,7 @@ atlantis server \
 ```

 ##### GitLab Enterprise
+
 ```bash
 HOSTNAME=YOUR_GITLAB_ENTERPRISE_HOSTNAME # ex. gitlab.runatlantis.io
 atlantis server \
@@ -634,7 +724,20 @@ atlantis server \
 --repo-allowlist="$REPO_ALLOWLIST"
 ```

+##### Gitea
+
+```bash
+atlantis server \
+--atlantis-url="$URL" \
+--gitea-user="$USERNAME" \
+--gitea-token="$TOKEN" \
+--gitea-webhook-secret="$SECRET" \
+--gitea-page-size=30 \
+--repo-allowlist="$REPO_ALLOWLIST"
+```
+
 ##### Bitbucket Cloud (bitbucket.org)
+
 ```bash
 atlantis server \
 --atlantis-url="$URL" \
@@ -645,6 +748,7 @@ atlantis server \
 ```

 ##### Bitbucket Server (aka Stash)
+
 ```bash
 BASE_URL=YOUR_BITBUCKET_SERVER_URL # ex. http://bitbucket.mycorp:7990
 atlantis server \
@@ -673,21 +777,23 @@ atlantis server \
 ```

 Where
-- `$URL` is the URL that Atlantis can be reached at
-- `$USERNAME` is the GitHub/GitLab/Bitbucket/AzureDevops username you generated the token for
-- `$TOKEN` is the access token you created. If you don't want this to be passed
+
+* `$URL` is the URL that Atlantis can be reached at
+* `$USERNAME` is the GitHub/GitLab/Gitea/Bitbucket/AzureDevops username you generated the token for
+* `$TOKEN` is the access token you created. 
If you don't want this to be passed
 in as an argument for security reasons you can specify it in a config file
- (see [Configuration](/docs/server-configuration.html#environment-variables))
- or as an environment variable: `ATLANTIS_GH_TOKEN` or `ATLANTIS_GITLAB_TOKEN`
+ (see [Configuration](server-configuration.md#environment-variables))
+ or as an environment variable: `ATLANTIS_GH_TOKEN` or `ATLANTIS_GITLAB_TOKEN` or `ATLANTIS_GITEA_TOKEN`
  or `ATLANTIS_BITBUCKET_TOKEN` or `ATLANTIS_AZUREDEVOPS_TOKEN`
-- `$SECRET` is the random key you used for the webhook secret.
+* `$SECRET` is the random key you used for the webhook secret.
  If you don't want this to be passed in as an argument for security reasons
  you can specify it in a config file
- (see [Configuration](/docs/server-configuration.html#environment-variables))
- or as an environment variable: `ATLANTIS_GH_WEBHOOK_SECRET` or `ATLANTIS_GITLAB_WEBHOOK_SECRET`
-- `$REPO_ALLOWLIST` is which repos Atlantis can run on, ex.
+ (see [Configuration](server-configuration.md#environment-variables))
+ or as an environment variable: `ATLANTIS_GH_WEBHOOK_SECRET` or `ATLANTIS_GITLAB_WEBHOOK_SECRET` or
+ `ATLANTIS_GITEA_WEBHOOK_SECRET`
+* `$REPO_ALLOWLIST` is which repos Atlantis can run on, ex.
  `github.com/runatlantis/*` or `github.enterprise.corp.com/*`.
- See [Repo Allowlist](server-configuration.html#repo-allowlist) for more details.
+ See [--repo-allowlist](server-configuration.md#repo-allowlist) for more details.

 Atlantis is now running!
 ::: tip
@@ -696,5 +802,6 @@ restart it in case of failure.
 :::

 ## Next Steps
+
 * To ensure Atlantis is running, load its UI. By default Atlantis runs on port `4141`.
-* Now you're ready to add Webhooks to your repos. See [Configuring Webhooks](configuring-webhooks.html).
+* Now you're ready to add Webhooks to your repos. See [Configuring Webhooks](configuring-webhooks.md).
diff --git a/runatlantis.io/docs/faq.md b/runatlantis.io/docs/faq.md
index 2cea8e8c92..1764719d97 100644
--- a/runatlantis.io/docs/faq.md
+++ b/runatlantis.io/docs/faq.md
@@ -1,4 +1,5 @@
 # FAQ
+
 **Q: Does Atlantis affect Terraform [remote state](https://developer.hashicorp.com/terraform/language/state/remote)?**

 A: No. Atlantis does not interfere with Terraform remote state in any way. Under the hood, Atlantis is simply executing `terraform plan` and `terraform apply`.
diff --git a/runatlantis.io/docs/how-atlantis-works.md b/runatlantis.io/docs/how-atlantis-works.md
index ed57d988f5..f486091b3b 100644
--- a/runatlantis.io/docs/how-atlantis-works.md
+++ b/runatlantis.io/docs/how-atlantis-works.md
@@ -1,7 +1,8 @@
 # How Atlantis Works
+
 This section of the docs talks about how Atlantis works at a deeper level.

-* [Locking](locking.html)
-* [Autoplanning](autoplanning.html)
-* [Automerging](automerging.html)
-* [Security](security.html)
+* [Locking](locking.md)
+* [Autoplanning](autoplanning.md)
+* [Automerging](automerging.md)
+* [Security](security.md)
diff --git a/runatlantis.io/docs/installation-guide.md b/runatlantis.io/docs/installation-guide.md
index fafa5d5b90..f5f1bd71d1 100644
--- a/runatlantis.io/docs/installation-guide.md
+++ b/runatlantis.io/docs/installation-guide.md
@@ -1,20 +1,22 @@
 # Installation Guide
+
 This guide is for installing a **production-ready** instance of Atlantis onto your
 infrastructure:
+
 1. First, ensure your Terraform setup meets the Atlantis **requirements**
-   * See [Requirements](requirements.html)
-1. 
Create **access credentials** for your Git host (GitHub, GitLab, Bitbucket, Azure DevOps) - * See [Generating Git Host Access Credentials](access-credentials.html) + * See [Requirements](requirements.md) +1. Create **access credentials** for your Git host (GitHub, GitLab, Gitea, Bitbucket, Azure DevOps) + * See [Generating Git Host Access Credentials](access-credentials.md) 1. Create a **webhook secret** so Atlantis can validate webhooks - * See [Creating a Webhook Secret](webhook-secrets.html) + * See [Creating a Webhook Secret](webhook-secrets.md) 1. **Deploy** Atlantis into your infrastructure - * See [Deployment](deployment.html) + * See [Deployment](deployment.md) 1. Configure **Webhooks** on your Git host so Atlantis can respond to your pull requests - * See [Configuring Webhooks](configuring-webhooks.html) + * See [Configuring Webhooks](configuring-webhooks.md) 1. Configure **provider credentials** so Atlantis can actually run Terraform commands - * See [Provider Credentials](provider-credentials.html) + * See [Provider Credentials](provider-credentials.md) :::tip -If you want to test out Atlantis first, check out [Test Drive](../guide/test-drive.html) -and [Testing Locally](../guide/testing-locally.html). +If you want to test out Atlantis first, check out [Test Drive](../guide/test-drive.md) +and [Testing Locally](../guide/testing-locally.md). ::: diff --git a/runatlantis.io/docs/locking.md b/runatlantis.io/docs/locking.md index 65836d3b70..c75e2b3fce 100644 --- a/runatlantis.io/docs/locking.md +++ b/runatlantis.io/docs/locking.md @@ -1,4 +1,5 @@ # Locking + When `plan` is run, the directory and Terraform workspace are **Locked** until the pull request is merged or closed, or the plan is manually deleted. If another user attempts to `plan` for the same directory and workspace in a different pull request @@ -12,9 +13,8 @@ Which links them to the pull request that holds the lock. Only the directory in the repo and Terraform workspace are locked, not the whole repo. ::: -[[toc]] - ## Why + 1. Because `atlantis apply` is being done before the pull request is merged, after an apply your `main` branch does not represent the most up to date version of your infrastructure anymore. With locking, you can ensure that no other changes will be made until the @@ -30,6 +30,7 @@ but with the added ability to re-plan/apply multiple times if things don't work. will be made invalid after the in-progress plan is applied. ## Viewing Locks + To view locks, go to the URL that Atlantis is hosted at: ![Locks View](./images/locks-ui.png) @@ -41,6 +42,7 @@ You can click on a lock to view its details: </p> ## Unlocking + The project and workspace will be automatically unlocked when the PR is merged or closed. To unlock the project and workspace without completing an `apply` and merging, comment `atlantis unlock` on the PR, @@ -59,6 +61,7 @@ to delete the lock. Once a plan is discarded, you'll need to run `plan` again prior to running `apply` when you go back to that pull request. ## Relationship to Terraform State Locking + Atlantis does not conflict with [Terraform State Locking](https://developer.hashicorp.com/terraform/language/state/locking). Under the hood, all Atlantis is doing is running `terraform plan` and `apply` and so all of the locking built in to those commands by Terraform isn't affected. 
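If runs outside Atlantis sometimes hold the Terraform state lock (a scheduled drift-detection job, for example), you can have Atlantis wait for the lock instead of failing immediately by passing `-lock-timeout` through `extra_args` in a custom workflow. A minimal sketch, assuming a server-side `repos.yaml`; the workflow name and timeout value are illustrative:

```yaml
# repos.yaml -- hypothetical sketch; waits up to 5 minutes for a held state lock
workflows:
  wait-for-state-lock:
    plan:
      steps:
      - init
      - plan:
          extra_args: ["-lock-timeout=5m"]
    apply:
      steps:
      - apply:
          extra_args: ["-lock-timeout=5m"]
```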
diff --git a/runatlantis.io/docs/policy-checking.md b/runatlantis.io/docs/policy-checking.md
index c996ef7ee0..98bc6c760b 100644
--- a/runatlantis.io/docs/policy-checking.md
+++ b/runatlantis.io/docs/policy-checking.md
@@ -10,7 +10,7 @@ for using this step include:

 ## How it works

-Enabling "policy checking" in addition to the [mergeable apply requirement](/docs/command-requirements.html#supported-requirements) blocks applies on plans that fail any of the defined conftest policies.
+Enabling "policy checking" in addition to the [mergeable apply requirement](command-requirements.md#supported-requirements) blocks applies on plans that fail any of the defined conftest policies.

 ![Policy Check Apply Failure](./images/policy-check-apply-failure.png)

@@ -20,9 +20,9 @@ Any failures need to either be addressed in a successive commit, or approved by

 ![Policy Check Approval](./images/policy-check-approval.png)

-
 Policy approvals may be cleared either by re-planning, or by issuing the following command:
-```
+
+```shell
 atlantis approve_policies --clear-policy-approval
 ```

@@ -44,11 +44,11 @@ All repositories will have policy checking enabled.

 ### Step 2: Define the policy configuration

-Policy Configuration is defined in the [server-side repo configuration](https://www.runatlantis.io/docs/server-side-repo-config.html#reference).
+Policy Configuration is defined in the [server-side repo configuration](server-side-repo-config.md#reference).

 In this example we will define one policy set with one owner:

-```
+```yaml
 policies:
   owners:
     users:
@@ -71,12 +71,13 @@ policies:
 - `source` - Tells Atlantis where to fetch the policies from. Currently you can only host policies locally by using `local`.
 - `owners` - Defines the users/teams which are able to approve a specific policy set.
 - `approve_count` - Defines the number of approvals needed to bypass policy checks. Defaults to the top-level policies configuration, if not specified.
+- `prevent_self_approve` - Defines whether the PR author can approve policies.

-By default conftest is configured to only run the `main` package. If you wish to run specific/multiple policies consider passing `--namespace` or `--all-namespaces` to conftest with [`extra_args`](https://www.runatlantis.io/docs/custom-workflows.html#adding-extra-arguments-to-terraform-commands) via a custom workflow as shown in the below example.
+By default conftest is configured to only run the `main` package. If you wish to run specific/multiple policies consider passing `--namespace` or `--all-namespaces` to conftest with [`extra_args`](custom-workflows.md#adding-extra-arguments-to-terraform-commands) via a custom workflow as shown in the below example.

 Example Server Side Repo configuration using `--all-namespaces` and a local src dir.

-```
+```yaml
 repos:
   - id: github.com/myorg/example-repo
     workflow: custom
@@ -104,7 +105,7 @@ workflows:

 Conftest policies are based on [Open Policy Agent (OPA)](https://www.openpolicyagent.org/) and written in [rego](https://www.openpolicyagent.org/docs/latest/policy-language/#what-is-rego). Following our example, simply create a `rego` file in the `null_resource_warning` folder with the following code: a simple policy that will fail for plans containing newly created `null_resource`s.

-```
+```rego
 package main

 resource_types = {"null_resource"}
@@ -144,7 +145,7 @@ That's it! 
Now your Atlantis instance is configured to run policies on your Terr ### Pulling policies from a remote location -Conftest supports [pulling policies](https://www.conftest.dev/sharing/#pulling) from remote locations such as S3, git, OCI, and other protocols supported by the [go-getter](https://github.com/hashicorp/go-getter) library. The key [`extra_args`](https://www.runatlantis.io/docs/custom-workflows.html#adding-extra-arguments-to-terraform-commands) can be used to pass in the [`--update`](https://www.conftest.dev/sharing/#-update-flag) flag to tell `conftest` to pull the policies into the project folder before running the policy check. +Conftest supports [pulling policies](https://www.conftest.dev/sharing/#pulling) from remote locations such as S3, git, OCI, and other protocols supported by the [go-getter](https://github.com/hashicorp/go-getter) library. The key [`extra_args`](custom-workflows.md#adding-extra-arguments-to-terraform-commands) can be used to pass in the [`--update`](https://www.conftest.dev/sharing/#-update-flag) flag to tell `conftest` to pull the policies into the project folder before running the policy check. ```yaml workflows: @@ -163,7 +164,7 @@ Note that authentication may need to be configured separately if pulling policie ### Running policy check against Terraform source code -By default, Atlantis runs the policy check against the [`SHOWFILE`](https://www.runatlantis.io/docs/custom-workflows.html#custom-run-command). In order to run the policy test against Terraform files directly, override the default `conftest` command used and pass in `*.tf` as one of the inputs to `conftest`. The `show` step is required so that Atlantis will generate the `SHOWFILE`. +By default, Atlantis runs the policy check against the [`SHOWFILE`](custom-workflows.md#custom-run-command). In order to run the policy test against Terraform files directly, override the default `conftest` command used and pass in `*.tf` as one of the inputs to `conftest`. The `show` step is required so that Atlantis will generate the `SHOWFILE`. ```yaml workflows: @@ -171,13 +172,12 @@ workflows: policy_check: steps: - show - - run: conftest test $SHOWFILE *.tf + - run: conftest test $SHOWFILE *.tf --no-fail ``` ### Quiet policy checks -By default, Atlantis will add a comment to all pull requests with the policy check result - both successes and failures. Version 0.21.0 added the [`--quiet-policy-checks`](server-configuration.html#quiet-policy-checks) option, which will instead only add comments when policy checks fail, significantly reducing the number of comments when most policy check results succeed. - +By default, Atlantis will add a comment to all pull requests with the policy check result - both successes and failures. Version 0.21.0 added the [`--quiet-policy-checks`](server-configuration.md#quiet-policy-checks) option, which will instead only add comments when policy checks fail, significantly reducing the number of comments when most policy check results succeed. ### Data for custom run steps @@ -198,9 +198,10 @@ When the policy check workflow runs, a file is created in the working directory ## Running policy check only on some repositories -When policy checking is enabled it will be enforced on all repositories, in order to disable policy checking on some repositories first [enable policy checks](https://www.runatlantis.io/docs/policy-checking.html#getting-started) and then disable it explicitly on each repository with the `policy_check` flag. 
+When policy checking is enabled, it is enforced on all repositories. To disable policy checking on some repositories, first [enable policy checks](policy-checking.md#getting-started) and then disable them explicitly on each repository with the `policy_check` flag.

 For server side config:
+
 ```yml
 # repos.yaml
 repos:
@@ -216,6 +217,7 @@ repos:
 ```

 For repo level `atlantis.yaml` config:
+
 ```yml
 version: 3
 projects:
diff --git a/runatlantis.io/docs/post-workflow-hooks.md b/runatlantis.io/docs/post-workflow-hooks.md
index aca89a08aa..91ba0b7aa7 100644
--- a/runatlantis.io/docs/post-workflow-hooks.md
+++ b/runatlantis.io/docs/post-workflow-hooks.md
@@ -2,12 +2,10 @@

 Post workflow hooks can be defined to run scripts after default or custom
 workflows are executed. Post workflow hooks differ from [custom
-workflows](custom-workflows.html#custom-run-command) in that they are run
+workflows](custom-workflows.md#custom-run-command) in that they are run
 outside of Atlantis commands, which means they do not surface their output
 back to the PR as a comment.

-[[toc]]
-
 ## Usage

 Post workflow hooks can only be specified in the Server-Side Repo Config under
@@ -42,8 +40,7 @@ You can add a post workflow hook to perform custom reporting after all workflows
 have finished.

 In this example we use a custom workflow to generate cost estimates for each
-workflow using [Infracost](https://www.infracost.io/docs/integrations/atlantis/), then create a summary report after all workflows have completed.
-
+workflow using [Infracost](https://www.infracost.io/docs/integrations/cicd/#cicd-integrations), then create a summary report after all workflows have completed.

 ```yaml
 # repos.yaml
@@ -88,7 +85,7 @@ repos:
 ### Custom `run` Command

 This is very similar to [custom workflow run
-command](custom-workflows.html#custom-run-command).
+command](custom-workflows.md#custom-run-command).

 ```yaml
 - run: custom-command
@@ -102,6 +99,7 @@ command](custom-workflows.html#custom-run-command).
 | shellArgs | string | '-c' | no | The shell arguments to use for running the command |

 ::: tip Notes
+
 * `run` commands are executed with the following environment variables:
   * `BASE_REPO_NAME` - Name of the repository that the pull request will be merged into, ex. `atlantis`.
   * `BASE_REPO_OWNER` - Owner of the repository that the pull request will be merged into, ex. `runatlantis`.
diff --git a/runatlantis.io/docs/pre-workflow-hooks.md b/runatlantis.io/docs/pre-workflow-hooks.md
index 9087be24c7..dce3f2fe7d 100644
--- a/runatlantis.io/docs/pre-workflow-hooks.md
+++ b/runatlantis.io/docs/pre-workflow-hooks.md
@@ -2,15 +2,13 @@

 Pre workflow hooks can be defined to run scripts before default or custom
 workflows are executed. Pre workflow hooks differ from [custom
-workflows](custom-workflows.html#custom-run-command) in several ways.
+workflows](custom-workflows.md#custom-run-command) in several ways.

 1. Pre workflow hooks do not require the repository configuration to be
-   present. This can be utilized to [dynamically generate repo configs](pre-workflow-hooks.html#dynamic-repo-config-generation).
+   present. This can be utilized to [dynamically generate repo configs](pre-workflow-hooks.md#dynamic-repo-config-generation).
 2. Pre workflow hooks are run outside of Atlantis commands, which means they
    do not surface their output back to the PR as a comment. 
-[[toc]] - ## Usage Pre workflow hooks can only be specified in the Server-Side Repo Config under the @@ -19,9 +17,9 @@ Pre workflow hooks can only be specified in the Server-Side Repo Config under th ::: tip Note By default, `pre-workflow-hooks` do not prevent Atlantis from executing its workflows (`plan`, `apply`) even if a `run` command exits with an error. This -behavior can be changed by setting the [fail-on-pre-workflow-hook-error](server-configuration.html#fail-on-pre-workflow-hook-error) +behavior can be changed by setting the [fail-on-pre-workflow-hook-error](server-configuration.md#fail-on-pre-workflow-hook-error) flag in the Atlantis server configuration. -::: +::: ## Atlantis Command Targeting @@ -84,7 +82,7 @@ repos: ### Custom `run` Command This is very similar to the [custom workflow run -command](custom-workflows.html#custom-run-command). +command](custom-workflows.md#custom-run-command). ```yaml - run: custom-command @@ -98,6 +96,7 @@ command](custom-workflows.html#custom-run-command). | shellArgs | string | '-c' | no | The shell arguments to use for running the command | ::: tip Notes + * `run` commands are executed with the following environment variables: * `BASE_REPO_NAME` - Name of the repository that the pull request will be merged into, ex. `atlantis`. * `BASE_REPO_OWNER` - Owner of the repository that the pull request will be merged into, ex. `runatlantis`. @@ -109,7 +108,7 @@ command](custom-workflows.html#custom-run-command). * `PULL_NUM` - Pull request number or ID, ex. `2`. * `PULL_URL` - Pull request URL, ex. `https://github.com/runatlantis/atlantis/pull/2`. * `PULL_AUTHOR` - Username of the pull request author, ex. `acme-user`. - * `DIR` - The absolute path to the root of the cloned repository. + * `DIR` - The absolute path to the root of the cloned repository. * `USER_NAME` - Username of the VCS user running command, ex. `acme-user`. During an autoplan, the user will be the Atlantis API user, ex. `atlantis`. * `COMMENT_ARGS` - Any additional flags passed in the comment on the pull request. Flags are separated by commas and every character is escaped, ex. `atlantis plan -- arg1 arg2` will result in `COMMENT_ARGS=\a\r\g\1,\a\r\g\2`. diff --git a/runatlantis.io/docs/provider-credentials.md b/runatlantis.io/docs/provider-credentials.md index 793c082e94..09dd289759 100644 --- a/runatlantis.io/docs/provider-credentials.md +++ b/runatlantis.io/docs/provider-credentials.md @@ -1,17 +1,19 @@ # Provider Credentials + Atlantis runs Terraform by simply executing `terraform plan` and `apply` commands on the server Atlantis is hosted on. Just like when you run Terraform locally, Atlantis needs credentials for your specific provider. It's up to you how you provide credentials for your specific provider to Atlantis: - -* The Atlantis [Helm Chart](deployment.html#kubernetes-helm-chart) and - [AWS Fargate Module](deployment.html#aws-fargate) have their own mechanisms for provider + +* The Atlantis [Helm Chart](deployment.md#kubernetes-helm-chart) and + [AWS Fargate Module](deployment.md#aws-fargate) have their own mechanisms for provider credentials. Read their docs.
* If you're running Atlantis in a cloud then many clouds have ways to give cloud API access to applications running on them, ex: - * [AWS EC2 Roles](https://registry.terraform.io/providers/hashicorp/aws/latest/docs) (Search for "EC2 Role") - * [GCE Instance Service Accounts](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/provider_reference) + * [AWS EC2 Roles](https://registry.terraform.io/providers/hashicorp/aws/latest/docs) (Search for "EC2 Role") + * [GCE Instance Service Accounts](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/provider_reference) * Many users set environment variables, ex. `AWS_ACCESS_KEY`, where Atlantis is running. * Others create the necessary config files, ex. `~/.aws/credentials`, where Atlantis is running. * Use the [HashiCorp Vault Provider](https://registry.terraform.io/providers/hashicorp/vault/latest/docs) @@ -22,10 +24,10 @@ As a general rule, if you can `ssh` or `exec` into the server where Atlantis is running and run `terraform` commands like you would locally, then Atlantis will work. ::: - ## AWS Specific Info ### Multiple AWS Accounts + Atlantis supports multiple AWS accounts through the use of Terraform's [AWS Authentication](https://registry.terraform.io/providers/hashicorp/aws/latest/docs) (Search for "Authentication"). @@ -41,6 +43,7 @@ won't work for multiple accounts since Atlantis wouldn't know which environment Terraform with. ### Assume Role Session Names + If you're using Terraform < 0.12, Atlantis injects 5 Terraform variables that can be used to dynamically name the assume role session name. Setting the `session_name` allows you to trace API calls made through Atlantis back to a specific user and repo via CloudWatch: @@ -89,5 +92,6 @@ You can still set these variables yourself using the `extra_args` configuration. ::: ## Next Steps -* If you want to configure Atlantis further, read [Configuring Atlantis](configuring-atlantis.html) -* If you're ready to use Atlantis, read [Using Atlantis](using-atlantis.html) + +* If you want to configure Atlantis further, read [Configuring Atlantis](configuring-atlantis.md) +* If you're ready to use Atlantis, read [Using Atlantis](using-atlantis.md) diff --git a/runatlantis.io/docs/repo-and-project-permissions.md b/runatlantis.io/docs/repo-and-project-permissions.md new file mode 100644 index 0000000000..5999b3a5b3 --- /dev/null +++ b/runatlantis.io/docs/repo-and-project-permissions.md @@ -0,0 +1,174 @@ +# Repo and Project Permissions + +Sometimes it may be necessary to limit who can run which commands, such as +restricting who can apply changes to production, while allowing more +freedom for dev and test environments. + +## Authorization Workflow + +Atlantis performs two authorization checks to verify a user has the necessary +permissions to run a command: + +1. After a command has been validated, before var files, repo metadata, or + pull request statuses are checked and validated. +2. After pre workflow hooks have run, repo configuration processed, and + affected projects determined. + +::: tip Note +The first check should be considered as validating the user for a repository +as a whole, while the second check is for validating a user for a specific +project in that repo. +::: + +### Why check permissions twice? + +The way Atlantis is currently designed, not all relevant information may be +available when the first check happens. 
In particular, affected projects +are not known because pre workflow hooks haven't run yet, so repositories +that use hooks to generate or modify repo configurations won't know which +projects to check permissions for. + +## Configuring permissions + +Atlantis has two options for allowing instance administrators to configure +permissions. + +### Server option [`--gh-team-allowlist`](server-configuration.md#gh-team-allowlist) + +The `--gh-team-allowlist` option allows administrators to configure a global +set of permissions that apply to all repositories. For most use cases, this +should be sufficient. + +### External command + +For administrators that require more granular and specific permission +definitions, an external command can be defined in the [server side repo +configuration](server-side-repo-config.md#teamauthz). This command will receive +information about the command, repo, project, and GitHub teams the user is a +member of, allowing administrators to integrate the permissions validation +with other systems or business requirements. An example would be allowing +users to apply changes to lower environments like dev and test environments +while restricting changes to production or other sensitive environments. + +::: warning +These options are mutually exclusive. If an external command is defined, +the `--gh-team-allowlist` option is ignored. +::: + +## Example + +### Restrict production changes + +This example shows a simple example of how a script could be used to restrict +production changes to a specific team, while allowing anyone to work on other +environments. For brevity, this example assumes each user is a member of a +single team. + +`server-side-repo-config.yaml` + +```yaml +team_authz: + command: "/scripts/example.sh" +``` + +`example.sh` + +```shell +#!/bin/bash + +# Define name of team allowed to make production changes +PROD_TEAM="example-org/prod-deployers" + +# Set variables from command-line arguments for convenience +COMMAND="$1" +REPO="$2" +TEAM="$3" + +# Check if we are running the 'apply' command on prod +if [ "${COMMAND}" == "apply" -a "${PROJECT_NAME}" == "prod" ] +then + # Only the prod team can make this change + if [ "${TEAM}" == "${PROD_TEAM}" ] + then + echo "pass" + exit 0 + fi + + # Print reason for failing and exit + echo "user \"${USER_NAME}\" must be a member of \"${PROD_TEAM}\" to apply changes to production." + exit 0 +fi + +# Any other command and environment is okay +echo "pass" +exit 0 +``` + +## Reference + +### External Command Execution + +External commands are executed on every authorization check with arguments and +environment variables containing context about the command being checked. The +command is executed using the following format: + +```shell +external_command [external_args...] atlantis_command repo [teams...] 
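+# For example, with the hypothetical configuration shown earlier this could expand to:
+#   /scripts/example.sh apply example-org/infra example-org/prod-deployers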
+``` + +| Key | Optional | Description | +|--------------------|----------|-------------------------------------------------------------------------------------------| +| `external_command` | no | Command defined in [server side repo configuration](server-side-repo-config.md) | +| `external_args` | yes | Command arguments defined in [server side repo configuration](server-side-repo-config.md) | +| `atlantis_command` | no | The atlantis command being run (`plan`, `apply`, etc) | +| `repo` | no | The full name of the repo being executed (format: `owner/repo_name`) | +| `teams` | yes | A list of zero or more teams of the user executing the command | + +The following environment variables are passed to the command on every execution: + +| Key | Description | +|----------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `BASE_REPO_NAME` | Name of the repository that the pull request will be merged into, ex. `atlantis`. | +| `BASE_REPO_OWNER` | Owner of the repository that the pull request will be merged into, ex. `runatlantis`. | +| `COMMAND_NAME` | The name of the command that is being executed, i.e. `plan`, `apply` etc. | +| `USER_NAME` | Username of the VCS user running command, ex. `acme-user`. During an autoplan, the user will be the Atlantis API user, ex. `atlantis`. | + +The following environment variables are also passed to the command when checking project authorization: + +| Key | Description | +|----------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `BASE_BRANCH_NAME` | Name of the base branch of the pull request (the branch that the pull request is getting merged into) | +| `COMMENT_ARGS` | Any additional flags passed in the comment on the pull request. Flags are separated by commas and every character is escaped, ex. `atlantis plan -- arg1 arg2` will result in `COMMENT_ARGS=\a\r\g\1,\a\r\g\2`. | +| `HEAD_REPO_NAME` | Name of the repository that is getting merged into the base repository, ex. `atlantis`. | +| `HEAD_REPO_OWNER` | Owner of the repository that is getting merged into the base repository, ex. `acme-corp`. | +| `HEAD_BRANCH_NAME` | Name of the head branch of the pull request (the branch that is getting merged into the base) | +| `HEAD_COMMIT` | The sha256 that points to the head of the branch that is being pull requested into the base. If the pull request is from Bitbucket Cloud the string will only be 12 characters long because Bitbucket Cloud truncates its commit IDs. | +| `PROJECT_NAME` | Name of the project the command is being executed on | +| `PULL_NUM` | Pull request number or ID, ex. `2`. | +| `PULL_URL` | Pull request URL, ex. `https://github.com/runatlantis/atlantis/pull/2`. | +| `PULL_AUTHOR` | Username of the pull request author, ex. `acme-user`. | +| `REPO_ROOT` | The absolute path to the root of the cloned repository. | +| `REPO_REL_PATH` | Path to the project relative to `REPO_ROOT` | + +### External Command Result Handling + +Atlantis determines if a user is authorized to run the requested command by +checking if the external command exited with code `0` and if the last line +of output is `pass`. 
+ +```text +# Psuedo-code of Atlantis evaluation of external commands + +user_authorized = + external_command.exit_code == 0 + && external_command.output.last_line == 'pass' +``` + +::: tip + +* A non-zero exit code means the command failed to evaluate the request for +some reason (bad configuration, missing dependencies, solar flares, etc). +* If the command was able to run successfully, but determined the user is not +authorized, it should still exit with code `0`. + * The command output could contain the reasoning for the authorization failure. +::: diff --git a/runatlantis.io/docs/repo-level-atlantis-yaml.md b/runatlantis.io/docs/repo-level-atlantis-yaml.md index c4c6ed3792..11feb31224 100644 --- a/runatlantis.io/docs/repo-level-atlantis-yaml.md +++ b/runatlantis.io/docs/repo-level-atlantis-yaml.md @@ -1,31 +1,33 @@ # Repo Level atlantis.yaml Config + An `atlantis.yaml` file specified at the root of a Terraform repo allows you to instruct Atlantis on the structure of your repo and set custom workflows. -[[toc]] - ## Do I need an atlantis.yaml file? + `atlantis.yaml` files are only required if you wish to customize some aspect of Atlantis. The default Atlantis config works for many users without changes. Read through the [use-cases](#use-cases) to determine if you need it. ## Enabling atlantis.yaml + By default, all repos are allowed to have an `atlantis.yaml` file, but some of the keys are restricted by default. Restricted keys can be set in the server-side `repos.yaml` repo config file. You can enable `atlantis.yaml` to override restricted -keys by setting the `allowed_overrides` key there. See the [Server Side Repo Config](server-side-repo-config.html) for +keys by setting the `allowed_overrides` key there. See the [Server Side Repo Config](server-side-repo-config.md) for more details. -**Notes** +**Notes:** + * By default, repo root `atlantis.yaml` file is used. -* You can change this behaviour by setting [Server Side Repo Config](server-side-repo-config.html) +* You can change this behaviour by setting [Server Side Repo Config](server-side-repo-config.md) ::: danger DANGER Atlantis uses the `atlantis.yaml` version from the pull request, similar to other -CI/CD systems. If you're allowing users to [create custom workflows](server-side-repo-config.html#allow-repos-to-define-their-own-workflows) +CI/CD systems. If you're allowing users to [create custom workflows](server-side-repo-config.md#allow-repos-to-define-their-own-workflows) then this means anyone that can create a pull request to your repo can run arbitrary code on the Atlantis server. @@ -66,7 +68,9 @@ projects: workspace: default terraform_version: v0.11.0 delete_source_branch_on_merge: true - repo_locking: true + repo_locking: true # deprecated: use repo_locks instead + repo_locks: + mode: on_plan custom_policy_check: false autoplan: when_modified: ["*.tf", "../modules/**/*.tf", ".terraform.lock.hcl"] @@ -74,6 +78,7 @@ projects: plan_requirements: [mergeable, approved, undiverged] apply_requirements: [mergeable, approved, undiverged] import_requirements: [mergeable, approved, undiverged] + silence_pr_comments: ["apply"] execution_order_group: 1 depends_on: - project-1 @@ -147,7 +152,9 @@ grep -P 'backend[\s]+"s3"' **/*.tf | ``` ## Use Cases + ### Disabling Autoplanning + ```yaml version: 3 projects: @@ -155,6 +162,7 @@ projects: autoplan: enabled: false ``` + This will stop Atlantis automatically running plan when `project1/` is updated in a pull request. 
@@ -178,7 +186,7 @@ Parallel plans and applies work across both multiple directories and multiple wo Given the directory structure: -``` +```plain . ├── modules │   └── module1 @@ -193,7 +201,6 @@ Given the directory structure: If you want Atlantis to plan `project1/` whenever any `.tf` files under `module1/` change or any `.tf` or `.tfvars` files under `project1/` change you could use the following configuration: - ```yaml version: 3 projects: @@ -203,12 +210,14 @@ projects: ``` Note: + * `when_modified` uses the [`.dockerignore` syntax](https://docs.docker.com/engine/reference/builder/#dockerignore-file) * The paths are relative to the project's directory. * `when_modified` will be used by both automatic and manually run plans. * `when_modified` will continue to work for manually run plans even when autoplan is disabled. ### Supporting Terraform Workspaces + ```yaml version: 3 projects: @@ -217,34 +226,44 @@ projects: - dir: project1 workspace: production ``` + With the above config, when Atlantis determines that the configuration for the `project1` dir has changed, it will run plan for both the `staging` and `production` workspaces. If you want to `plan` or `apply` for a specific workspace you can use -``` + +```shell atlantis plan -w staging -d project1 ``` + and -``` + +```shell atlantis apply -w staging -d project1 ``` ### Using .tfvars files -See [Custom Workflow Use Cases: Using .tfvars files](custom-workflows.html#tfvars-files) + +See [Custom Workflow Use Cases: Using .tfvars files](custom-workflows.md#tfvars-files) ### Adding extra arguments to Terraform commands -See [Custom Workflow Use Cases: Adding extra arguments to Terraform commands](custom-workflows.html#adding-extra-arguments-to-terraform-commands) + +See [Custom Workflow Use Cases: Adding extra arguments to Terraform commands](custom-workflows.md#adding-extra-arguments-to-terraform-commands) ### Custom init/plan/apply Commands -See [Custom Workflow Use Cases: Custom init/plan/apply Commands](custom-workflows.html#custom-init-plan-apply-commands) + +See [Custom Workflow Use Cases: Custom init/plan/apply Commands](custom-workflows.md#custom-init-plan-apply-commands) ### Terragrunt -See [Custom Workflow Use Cases: Terragrunt](custom-workflows.html#terragrunt) + +See [Custom Workflow Use Cases: Terragrunt](custom-workflows.md#terragrunt) ### Running custom commands -See [Custom Workflow Use Cases: Running custom commands](custom-workflows.html#running-custom-commands) + +See [Custom Workflow Use Cases: Running custom commands](custom-workflows.md#running-custom-commands) ### Terraform Versions + If you'd like to use a different version of Terraform than what is in Atlantis' `PATH` or is set by the `--default-tf-version` flag, then set the `terraform_version` key: @@ -258,7 +277,9 @@ projects: Atlantis will automatically download and use this version. ### Requiring Approvals For Production + In this example, we only want to require `apply` approvals for the `production` directory. + ```yaml version: 3 projects: @@ -268,12 +289,14 @@ projects: apply_requirements: [approved] import_requirements: [approved] ``` + :::warning `plan_requirements`, `apply_requirements` and `import_requirements` are restricted keys so this repo will need to be configured -to be allowed to set this key. See [Server-Side Repo Config Use Cases](server-side-repo-config.html#repos-can-set-their-own-apply-an-applicable-subcommand). +to be allowed to set this key. 
See [Server-Side Repo Config Use Cases](server-side-repo-config.md#repos-can-set-their-own-apply-an-applicable-subcommand). ::: ### Order of planning/applying + ```yaml version: 3 abort_on_execution_order_fail: true @@ -283,12 +306,13 @@ projects: - dir: project2 execution_order_group: 1 ``` + With this config above, Atlantis runs planning/applying for project2 first, then for project1. Several projects can have same `execution_order_group`. Any order in one group isn't guaranteed. `parallel_plan` and `parallel_apply` respect these order groups, so parallel planning/applying works -in each group one by one. +in each group one by one. -If any plan/apply fails and `abort_on_execution_order_fail` is set to true on a repo level, all the +If any plan/apply fails and `abort_on_execution_order_fail` is set to true on a repo level, all the following groups will be aborted. For this example, if project2 fails then project1 will not run. Execution order groups are useful when you have dependencies between projects. However, they are only applicable in the case where @@ -296,6 +320,7 @@ you initiate a global apply for all of your projects, i.e `atlantis apply`. If y Thus, the `depends_on` key is more useful in this case. and can be used in conjunction with execution order groups. The following configuration is an example of how to use execution order groups and depends_on together to enforce dependencies between projects. + ```yaml version: 3 projects: @@ -323,6 +348,7 @@ projects: workspace: production workflow: infra ``` + the `depends_on` feature will make sure that `production` is not applied before `staging` for example. ::: tip @@ -331,11 +357,14 @@ What Happens if one or more project's dependencies are not applied? If there's one or more projects in the dependency list which is not in applied status, users will see an error message like this: `Can't apply your project unless you apply its dependencies` ::: + ### Autodiscovery Config + ```yaml autodiscover: mode: "auto" ``` + The above is the default configuration for `autodiscover.mode`. When `autodiscover.mode` is auto, projects will be discovered only if the repo has no `projects` configured. @@ -343,14 +372,16 @@ projects will be discovered only if the repo has no `projects` configured. autodiscover: mode: "disabled" ``` + With the config above, Atlantis will never try to discover projects, even when there are no `projects` configured. This is useful if dynamically generating Atlantis config in pre_workflow hooks. -See [Dynamic Repo Config Generation](pre-workflow-hooks.html#dynamic-repo-config-generation). +See [Dynamic Repo Config Generation](pre-workflow-hooks.md#dynamic-repo-config-generation). ```yaml autodiscover: mode: "enabled" ``` + With the config above, Atlantis will unconditionally try to discover projects based on modified_files, even when the directory of the project is missing from the configured `projects` in the repo configuration. If a discovered project has the same directory as a project which was manually configured in `projects`, @@ -360,10 +391,13 @@ Use this feature when some projects require specific configuration in a repo wit it's still desirable for Atlantis to plan/apply for projects not enumerated in the config. 
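As a sketch of that last mode (the directory and version here are illustrative), a repo could pin settings for one project while letting Atlantis discover the rest:

```yaml
version: 3
autodiscover:
  mode: "enabled"
projects:
- dir: project1
  terraform_version: v1.5.7
```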
### Custom Backend Config -See [Custom Workflow Use Cases: Custom Backend Config](custom-workflows.html#custom-backend-config) + +See [Custom Workflow Use Cases: Custom Backend Config](custom-workflows.md#custom-backend-config) ## Reference + ### Top-Level Keys + ```yaml version: 3 automerge: false @@ -372,16 +406,18 @@ projects: workflows: allowed_regexp_prefixes: ``` -| Key | Type | Default | Required | Description | -|-------------------------------|----------------------------------------------------------|---------|----------|--------------------------------------------------------------------------------------------------------------------------------------| -| version | int | none | **yes** | This key is required and must be set to `3`. | -| automerge | bool | `false` | no | Automatically merges pull request when all plans are applied. | -| delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge. | -| projects | array[[Project](repo-level-atlantis-yaml.html#project)] | `[]` | no | Lists the projects in this repo. | -| workflows<br />*(restricted)* | map[string: [Workflow](custom-workflows.html#reference)] | `{}` | no | Custom workflows. | -| allowed_regexp_prefixes | array[string] | `[]` | no | Lists the allowed regexp prefixes to use when the [`--enable-regexp-cmd`](server-configuration.html#enable-regexp-cmd) flag is used. | + +| Key | Type | Default | Required | Description | +|-------------------------------|--------------------------------------------------------|---------|----------|------------------------------------------------------------------------------------------------------------------------------------| +| version | int | none | **yes** | This key is required and must be set to `3`. | +| automerge | bool | `false` | no | Automatically merges pull request when all plans are applied. | +| delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge. | +| projects | array[[Project](repo-level-atlantis-yaml.md#project)] | `[]` | no | Lists the projects in this repo. | +| workflows<br />*(restricted)* | map[string: [Workflow](custom-workflows.md#reference)] | `{}` | no | Custom workflows. | +| allowed_regexp_prefixes | array\[string\] | `[]` | no | Lists the allowed regexp prefixes to use when the [`--enable-regexp-cmd`](server-configuration.md#enable-regexp-cmd) flag is used. | ### Project + ```yaml name: myname branch: /mybranch/ @@ -389,45 +425,62 @@ dir: mydir workspace: myworkspace execution_order_group: 0 delete_source_branch_on_merge: false -repo_locking: true +repo_locking: true # deprecated: use repo_locks instead +repo_locks: + mode: on_plan custom_policy_check: false autoplan: terraform_version: 0.11.0 plan_requirements: ["approved"] apply_requirements: ["approved"] import_requirements: ["approved"] +silence_pr_comments: ["apply"] workflow: myworkflow ``` -| Key | Type | Default | Required | Description | -|------------------------------------------|-----------------------|-------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| name | string | none | maybe | Required if there is more than one project with the same `dir` and `workspace`. This project name can be used with the `-p` flag. 
| -| branch | string | none | no | Regex matching projects by the base branch of pull request (the branch the pull request is getting merged into). Only projects that match the PR's branch will be considered. By default, all branches are matched. | -| dir | string | none | **yes** | The directory of this project relative to the repo root. For example if the project was under `./project1` then use `project1`. Use `.` to indicate the repo root. | -| workspace | string | `"default"` | no | The [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces) for this project. Atlantis will switch to this workplace when planning/applying and will create it if it doesn't exist. | -| execution_order_group | int | `0` | no | Index of execution order group. Projects will be sort by this field before planning/applying. | -| delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge. | -| repo_locking | bool | `true` | no | Get a repository lock in this project when plan. | -| custom_policy_check | bool | `false` | no | Enable using policy check tools other than Conftest | -| autoplan | [Autoplan](#autoplan) | none | no | A custom autoplan configuration. If not specified, will use the autoplan config. See [Autoplanning](autoplanning.html). | -| terraform_version | string | none | no | A specific Terraform version to use when running commands for this project. Must be [Semver compatible](https://semver.org/), ex. `v0.11.0`, `0.12.0-beta1`. | -| plan_requirements<br />*(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis plan` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | -| apply_requirements<br />*(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | -| import_requirements<br />*(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | -| workflow <br />*(restricted)* | string | none | no | A custom workflow. If not specified, Atlantis will use its default workflow. | +| Key | Type | Default | Required | Description | +|-----------------------------------------|-------------------------|-----------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| name | string | none | maybe | Required if there is more than one project with the same `dir` and `workspace`. This project name can be used with the `-p` flag. | +| branch | string | none | no | Regex matching projects by the base branch of pull request (the branch the pull request is getting merged into). Only projects that match the PR's branch will be considered. By default, all branches are matched. | +| dir | string | none | **yes** | The directory of this project relative to the repo root. For example if the project was under `./project1` then use `project1`. 
Use `.` to indicate the repo root. | +| workspace | string | `"default"` | no | The [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces) for this project. Atlantis will switch to this workplace when planning/applying and will create it if it doesn't exist. | +| execution_order_group | int | `0` | no | Index of execution order group. Projects will be sort by this field before planning/applying. | +| delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge. | +| repo_locking | bool | `true` | no | (deprecated) Get a repository lock in this project when plan. | +| repo_locks | [RepoLocks](#repolocks) | `mode: on_plan` | no | Get a repository lock in this project on plan or apply. See [RepoLocks](#repolocks) for more details. | +| custom_policy_check | bool | `false` | no | Enable using policy check tools other than Conftest | +| autoplan | [Autoplan](#autoplan) | none | no | A custom autoplan configuration. If not specified, will use the autoplan config. See [Autoplanning](autoplanning.md). | +| terraform_version | string | none | no | A specific Terraform version to use when running commands for this project. Must be [Semver compatible](https://semver.org/), ex. `v0.11.0`, `0.12.0-beta1`. | +| plan_requirements<br />*(restricted)* | array\[string\] | none | no | Requirements that must be satisfied before `atlantis plan` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. | +| apply_requirements<br />*(restricted)* | array\[string\] | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. | +| import_requirements<br />*(restricted)* | array\[string\] | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. | +| silence_pr_comments | array\[string\] | none | no | Silence PR comments from defined stages while preserving PR status checks. Supported values are: `plan`, `apply`. | +| workflow <br />*(restricted)* | string | none | no | A custom workflow. If not specified, Atlantis will use its default workflow. | ::: tip A project represents a Terraform state. Typically, there is one state per directory and workspace however it's possible to have multiple states in the same directory using `terraform init -backend-config=custom-config.tfvars`. -Atlantis supports this but requires the `name` key to be specified. See [Custom Backend Config](custom-workflows.html#custom-backend-config) for more details. +Atlantis supports this but requires the `name` key to be specified. See [Custom Backend Config](custom-workflows.md#custom-backend-config) for more details. 
::: ### Autoplan + ```yaml enabled: true when_modified: ["*.tf", "terragrunt.hcl", ".terraform.lock.hcl"] ``` -| Key | Type | Default | Required | Description | -|-----------------------|---------------|----------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| enabled | boolean | `true` | no | Whether autoplanning is enabled for this project. | -| when_modified | array[string] | `["**/*.tf*"]` | no | Uses [.dockerignore](https://docs.docker.com/engine/reference/builder/#dockerignore-file) syntax. If any modified file in the pull request matches, this project will be planned. See [Autoplanning](autoplanning.html). Paths are relative to the project's dir. | + +| Key | Type | Default | Required | Description | +|-----------------------|-----------------|----------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| enabled | boolean | `true` | no | Whether autoplanning is enabled for this project. | +| when_modified | array\[string\] | `["**/*.tf*"]` | no | Uses [.dockerignore](https://docs.docker.com/engine/reference/builder/#dockerignore-file) syntax. If any modified file in the pull request matches, this project will be planned. See [Autoplanning](autoplanning.md). Paths are relative to the project's dir. | + +### RepoLocks + +```yaml +mode: on_apply +``` + +| Key | Type | Default | Required | Description | +|------|--------|-----------|----------|---------------------------------------------------------------------------------------------------------------------------------------| +| mode | `Mode` | `on_plan` | no | Whether or not repository locks are enabled for this project on plan or apply. Valid values are `disabled`, `on_plan` and `on_apply`. | diff --git a/runatlantis.io/docs/requirements.md b/runatlantis.io/docs/requirements.md index e300e63fe7..a661ac3508 100644 --- a/runatlantis.io/docs/requirements.md +++ b/runatlantis.io/docs/requirements.md @@ -1,19 +1,21 @@ # Requirements + Atlantis works with most Git hosts and Terraform setups. Read on to confirm it works with yours. -[[toc]] - ## Git Host + Atlantis integrates with the following Git hosts: * GitHub (public, private or enterprise) * GitLab (public, private or enterprise) +* Gitea (public, private and compatible forks like Forgejo) * Bitbucket Cloud aka bitbucket.org (public or private) * Bitbucket Server aka Stash * Azure DevOps ## Terraform State + Atlantis supports all backend types **except for local state**. We don't support local state because Atlantis does not have permanent storage and it doesn't commit the new statefile back to version control. @@ -24,17 +26,20 @@ storage from Terraform Cloud. This is fully supported by Atlantis. ::: ## Repository Structure + Atlantis supports any Terraform repository structure, for example: ### Single Terraform Project At Repo Root -``` + +```plain . ├── main.tf └── ... ``` ### Multiple Project Folders -``` + +```plain . ├── project1 │   ├── main.tf @@ -45,7 +50,8 @@ Atlantis supports any Terraform repository structure, for example: ``` ### Modules -``` + +```plain . 
├── project1 │   ├── main.tf @@ -55,35 +61,42 @@ Atlantis supports any Terraform repository structure, for example:    ├── main.tf └── ... ``` + With modules, if you want `project1` automatically planned when `module1` is modified -you need to create an `atlantis.yaml` file. See [atlantis.yaml Use Cases](repo-level-atlantis-yaml.html#configuring-planning) for more details. +you need to create an `atlantis.yaml` file. See [atlantis.yaml Use Cases](repo-level-atlantis-yaml.md#configuring-planning) for more details. + +### Terraform Workspaces -### Terraform Workspaces *See [Terraform's docs](https://developer.hashicorp.com/terraform/language/state/workspaces) if you are unfamiliar with workspaces.* If you're using Terraform `>= 0.9.0`, Atlantis supports workspaces through an `atlantis.yaml` file that tells Atlantis the names of your workspaces -(see [atlantis.yaml Use Cases](repo-level-atlantis-yaml.html#supporting-terraform-workspaces) for more details) +(see [atlantis.yaml Use Cases](repo-level-atlantis-yaml.md#supporting-terraform-workspaces) for more details) ### .tfvars Files -``` + +```plain . ├── production.tfvars │── staging.tfvars └── main.tf ``` + For Atlantis to be able to plan automatically with `.tfvars files`, you need to create an `atlantis.yaml` file to tell it to use `-var-file={YOUR_FILE}`. -See [atlantis.yaml Use Cases](custom-workflows.html#tfvars-files) for more details. +See [atlantis.yaml Use Cases](custom-workflows.md#tfvars-files) for more details. ### Multiple Repos + Atlantis supports multiple repos as well–as long as there is a webhook configured for each repo. ## Terraform Versions + Atlantis supports all Terraform versions (including 0.12) and can be configured -to use different versions for different repositories/projects. See [Terraform Versions](terraform-versions.html). +to use different versions for different repositories/projects. See [Terraform Versions](terraform-versions.md). ## Next Steps + * If your Terraform setup meets the Atlantis requirements, continue the installation - guide and set up your [Git Host Access Credentials](access-credentials.html) + guide and set up your [Git Host Access Credentials](access-credentials.md) diff --git a/runatlantis.io/docs/security.md b/runatlantis.io/docs/security.md index a689bb4182..69d96faf2a 100644 --- a/runatlantis.io/docs/security.md +++ b/runatlantis.io/docs/security.md @@ -1,13 +1,16 @@ # Security -[[toc]] + ## Exploits + Because you usually run Atlantis on a server with credentials that allow access to your infrastructure it's important that you deploy Atlantis securely. Atlantis could be exploited by + * An attacker submitting a pull request that contains a malicious Terraform file that uses a malicious provider or an [`external` data source](https://registry.terraform.io/providers/hashicorp/external/latest/docs/data-sources/data_source) that Atlantis then runs `terraform plan` on (which it does automatically unless you've turned off automatic plans). * Running `terraform apply` on a malicious Terraform file with [local-exec](https://developer.hashicorp.com/terraform/language/resources/provisioners/local-exec) + ```tf resource "null_resource" "null" { provisioner "local-exec" { @@ -15,20 +18,26 @@ Atlantis could be exploited by } } ``` + * Running malicious custom build commands specified in an `atlantis.yaml` file. Atlantis uses the `atlantis.yaml` file from the pull request branch, **not** `main`. 
* Someone adding `atlantis plan/apply` comments on your valid pull requests causing terraform to run when you don't want it to. - +* ## Mitigations + ### Don't Use On Public Repos + Because anyone can comment on public pull requests, even with all the security mitigations available, it's still dangerous to run Atlantis on public repos without proper configuration of the security settings. ### Don't Use `--allow-fork-prs` + If you're running on a public repo (which isn't recommended, see above) you shouldn't set `--allow-fork-prs` (defaults to false) because anyone can open up a pull request from their fork to your repo. ### `--repo-allowlist` + Atlantis requires you to specify a allowlist of repositories it will accept webhooks from via the `--repo-allowlist` flag. For example: + * Specific repositories: `--repo-allowlist=github.com/runatlantis/atlantis,github.com/runatlantis/atlantis-tests` * Your whole organization: `--repo-allowlist=github.com/runatlantis/*` * Every repository in your GitHub Enterprise install: `--repo-allowlist=github.yourcompany.com/*` @@ -38,19 +47,22 @@ For example: This flag ensures your Atlantis install isn't being used with repositories you don't control. See `atlantis server --help` for more details. ### Protect Terraform Planning + If attackers submitting pull requests with malicious Terraform code is in your threat model then you must be aware that `terraform apply` approvals are not enough. It is possible to run malicious code in a `terraform plan` using the [`external` data source](https://registry.terraform.io/providers/hashicorp/external/latest/docs/data-sources/data_source) or by specifying a malicious provider. This code could then exfiltrate your credentials. To prevent this, you could: + 1. Bake providers into the Atlantis image or host and deny egress in production. 1. Implement the provider registry protocol internally and deny public egress, that way you control who has write access to the registry. -1. Modify your [server-side repo configuration](https://www.runatlantis.io/docs/server-side-repo-config.html)'s `plan` step to validate against the +1. Modify your [server-side repo configuration](server-side-repo-config.md)'s `plan` step to validate against the use of disallowed providers or data sources or PRs from not allowed users. You could also add in extra validation at this point, e.g. requiring a "thumbs-up" on the PR before allowing the `plan` to continue. Conftest could be of use here. ### `--var-file-allowlist` + The files on your Atlantis install may be accessible as [variable definition files](https://developer.hashicorp.com/terraform/language/values/variables#variable-definitions-tfvars-files) from pull requests by adding `atlantis plan -- -var-file=/path/to/file` comments. To mitigate this security risk, Atlantis has limited such access @@ -58,6 +70,7 @@ only to the files allowlisted by the `--var-file-allowlist` flag. If this argume Atlantis' data directory. ### Webhook Secrets + Atlantis should be run with Webhook secrets set via the `$ATLANTIS_GH_WEBHOOK_SECRET`/`$ATLANTIS_GITLAB_WEBHOOK_SECRET` environment variables. Even with the `--repo-allowlist` flag set, without a webhook secret, attackers could make requests to Atlantis posing as a repository that is allowlisted. Webhook secrets ensure that the webhook requests are actually coming from your VCS provider (GitHub or GitLab). 
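One way to generate a strong secret, assuming `openssl` is available where Atlantis runs (the same value must also be configured on the webhook in your VCS provider):

```shell
export ATLANTIS_GH_WEBHOOK_SECRET="$(openssl rand -hex 32)"
```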
@@ -67,17 +80,20 @@ If you are using Azure DevOps, instead of webhook secrets add a [basic username ::: ### Azure DevOps Basic Authentication + Azure DevOps supports sending a basic authentication header in all webhook events. This requires using an HTTPS URL for your webhook location. ### SSL/HTTPS + If you're using webhook secrets but your traffic is over HTTP then the webhook secrets could be stolen. Enable SSL/HTTPS using the `--ssl-cert-file` and `--ssl-key-file` flags. ### Enable Authentication on Atlantis Web Server + It is very recommended to enable authentication in the web service. Enable BasicAuth using the `--web-basic-auth=true` and setup a username and a password using `--web-username=yourUsername` and `--web-password=yourPassword` flags. -You can also pass these as environment variables `ATLANTIS_WEB_BASIC_AUTH=true` `ATLANTIS_WEB_USERNAME=yourUsername` and `ATLANTIS_WEB_PASSWORD=yourPassword`. +You can also pass these as environment variables `ATLANTIS_WEB_BASIC_AUTH=true` `ATLANTIS_WEB_USERNAME=yourUsername` and `ATLANTIS_WEB_PASSWORD=yourPassword`. :::tip Tip We do encourage the usage of complex passwords in order to prevent basic bruteforcing attacks. diff --git a/runatlantis.io/docs/server-configuration.md b/runatlantis.io/docs/server-configuration.md index bafaa24c78..6017022b97 100644 --- a/runatlantis.io/docs/server-configuration.md +++ b/runatlantis.io/docs/server-configuration.md @@ -1,12 +1,12 @@ # Server Configuration + This page explains how to configure the `atlantis server` command. Configuration to `atlantis server` can be specified via command line flags, environment variables, a config file or a mix of the three. -[[toc]] - ## Environment Variables + All flags can be specified as environment variables. 1. Take the flag name, ex. `--gh-user` @@ -24,11 +24,13 @@ The flag `--atlantis-url` is set by the environment variable `ATLANTIS_ATLANTIS_ ::: ## Config File + All flags can also be specified via a YAML config file. To use a YAML config file, run `atlantis server --config /path/to/config.yaml`. The keys of your config file should be the same as the flag names, ex. + ```yaml gh-token: ... log-level: ... @@ -40,40 +42,49 @@ The `--config` config file is only used as an alternate way of setting `atlantis ::: ## Precedence + Values are chosen in this order: + 1. Flags 1. Environment Variables 1. Config File - ## Flags + ### `--allow-commands` + ```bash atlantis server --allow-commands=version,plan,apply,unlock,approve_policies # or ATLANTIS_ALLOW_COMMANDS='version,plan,apply,unlock,approve_policies' ``` + List of allowed commands to be run on the Atlantis server, Defaults to `version,plan,apply,unlock,approve_policies` Notes: - * Accepts a comma separated list, ex. `command1,command2`. - * `version`, `plan`, `apply`, `unlock`, `approve_policies`, `import`, `state` and `all` are available. - * `all` is a special keyword that allows all commands. If pass `all` then all other commands will be ignored. + +* Accepts a comma separated list, ex. `command1,command2`. +* `version`, `plan`, `apply`, `unlock`, `approve_policies`, `import`, `state` and `all` are available. +* `all` is a special keyword that allows all commands. If pass `all` then all other commands will be ignored. ### `--allow-draft-prs` + ```bash atlantis server --allow-draft-prs # or ATLANTIS_ALLOW_DRAFT_PRS=true ``` + Respond to pull requests from draft prs. Defaults to `false`. 
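For instance (values are illustrative), draft-PR handling can be combined with a restricted command list:

```bash
atlantis server --allow-draft-prs --allow-commands=plan,apply,unlock
```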
### `--allow-fork-prs` + ```bash atlantis server --allow-fork-prs # or ATLANTIS_ALLOW_FORK_PRS=true ``` + Respond to pull requests from forks. Defaults to `false`. :::warning SECURITY WARNING @@ -85,33 +96,40 @@ Values are chosen in this order: ::: ### `--api-secret` + ```bash atlantis server --api-secret="secret" # or (recommended) ATLANTIS_API_SECRET="secret" ``` - Required secret used to validate requests made to the [`/api/*` endpoints](api-endpoints.html). + + Required secret used to validate requests made to the [`/api/*` endpoints](api-endpoints.md). ### `--atlantis-url` + ```bash atlantis server --atlantis-url="https://my-domain.com:9090/basepath" # or ATLANTIS_ATLANTIS_URL=https://my-domain.com:9090/basepath ``` + Specify the URL that Atlantis is accessible from. Used in the Atlantis UI and in links from pull request comments. Defaults to `http://$(hostname):$port` where `$port` is from the [`--port`](#port) flag. Supports a basepath if you're hosting Atlantis under a path. Notes: - * If a load balancer with a non http/https port (not the one defined in the `--port` flag) is used, update the URL to include the port like in the example above. - * This URL is used as the `details` link next to each atlantis job to view the job's logs. + +* If a load balancer with a non http/https port (not the one defined in the `--port` flag) is used, update the URL to include the port like in the example above. +* This URL is used as the `details` link next to each atlantis job to view the job's logs. ### `--autodiscover-mode` + ```bash atlantis server --autodiscover-mode="<auto|enabled|disabled>" # or ATLANTIS_AUTODISCOVER_MODE="<auto|enabled|disabled>" ``` + Sets auto discover mode, default is `auto`. When set to `auto`, projects in a repo will be discovered by Atlantis when there are no projects configured in the repo config. If one or more projects are defined in the repo config then auto discovery will be completely disabled. @@ -123,40 +141,45 @@ Values are chosen in this order: When set to `disabled` projects will never be discovered, even if there are no projects configured in the repo config. ### `--automerge` + ```bash atlantis server --automerge # or ATLANTIS_AUTOMERGE=true ``` + Automatically merge pull requests after all plans have been successfully applied. - Defaults to `false`. See [Automerging](automerging.html) for more details. + Defaults to `false`. See [Automerging](automerging.md) for more details. ### `--autoplan-file-list` + ```bash # NOTE: Use single quotes to avoid shell expansion of *. atlantis server --autoplan-file-list='**/*.tf,project1/*.pkr.hcl' # or ATLANTIS_AUTOPLAN_FILE_LIST='**/*.tf,project1/*.pkr.hcl' ``` + List of file patterns that Atlantis will use to check if a directory contains modified files that should trigger project planning. Notes: - * Accepts a comma separated list, ex. `pattern1,pattern2`. - * Patterns use the [`.dockerignore` syntax](https://docs.docker.com/engine/reference/builder/#dockerignore-file) - * List of file patterns will be used by both automatic and manually run plans. - * When not set, defaults to all `.tf`, `.tfvars`, `.tfvars.json`, `terragrunt.hcl` and `.terraform.lock.hcl` files + +* Accepts a comma separated list, ex. `pattern1,pattern2`. +* Patterns use the [`.dockerignore` syntax](https://docs.docker.com/engine/reference/builder/#dockerignore-file) +* List of file patterns will be used by both automatic and manually run plans. 
+* When not set, defaults to all `.tf`, `.tfvars`, `.tfvars.json`, `terragrunt.hcl` and `.terraform.lock.hcl` files (`--autoplan-file-list='**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl,**/.terraform.lock.hcl'`). - * Setting `--autoplan-file-list` will override the defaults. You **must** add `**/*.tf` and other defaults if you want to include them. - * A custom [Workflow](repo-level-atlantis-yaml.html#configuring-planning) that uses autoplan `when_modified` will ignore this value. +* Setting `--autoplan-file-list` will override the defaults. You **must** add `**/*.tf` and other defaults if you want to include them. +* A custom [Workflow](repo-level-atlantis-yaml.md#configuring-planning) that uses autoplan `when_modified` will ignore this value. Examples: - * Autoplan when any `*.tf` or `*.tfvars` file is modified. - * `--autoplan-file-list='**/*.tf,**/*.tfvars'` - * Autoplan when any `*.tf` file is modified except in `project2/` directory - * `--autoplan-file-list='**/*.tf,!project2'` - * Autoplan when any `*.tf` files or `.yml` files in subfolder of `project1` is modified. - * `--autoplan-file-list='**/*.tf,project2/**/*.yml'` +* Autoplan when any `*.tf` or `*.tfvars` file is modified. + * `--autoplan-file-list='**/*.tf,**/*.tfvars'` +* Autoplan when any `*.tf` file is modified except in `project2/` directory + * `--autoplan-file-list='**/*.tf,!project2'` +* Autoplan when any `*.tf` files or `.yml` files in subfolder of `project1` is modified. + * `--autoplan-file-list='**/*.tf,project2/**/*.yml'` ::: warning NOTE By default, changes to modules will not trigger autoplanning. See the flags below. @@ -202,8 +225,8 @@ Current default is "" (disabled). Examples: - * `**/*.tf` - will index all projects that have a `.tf` file in their directory, and plan them whenever an in-repo module dependency has changed. - * `**/*.tf,!foo,!bar` - will index all projects containing `.tf` except `foo` and `bar` and plan them whenever an in-repo module dependency has changed. +* `**/*.tf` - will index all projects that have a `.tf` file in their directory, and plan them whenever an in-repo module dependency has changed. +* `**/*.tf,!foo,!bar` - will index all projects containing `.tf` except `foo` and `bar` and plan them whenever an in-repo module dependency has changed. This allows projects to opt-out of auto-planning when a module dependency changes. ::: warning NOTE @@ -217,35 +240,43 @@ and set `--autoplan-modules` to `false`. ::: ### `--azuredevops-hostname` + ```bash atlantis server --azuredevops-hostname="dev.azure.com" # or ATLANTIS_AZUREDEVOPS_HOSTNAME="dev.azure.com" ``` + Azure DevOps hostname to support cloud and self hosted instances. Defaults to `dev.azure.com`. ### `--azuredevops-token` + ```bash atlantis server --azuredevops-token="RandomStringProducedByAzureDevOps" # or (recommended) ATLANTIS_AZUREDEVOPS_TOKEN="RandomStringProducedByAzureDevOps" ``` + Azure DevOps token of API user. ### `--azuredevops-user` + ```bash atlantis server --azuredevops-user="username@example.com" # or ATLANTIS_AZUREDEVOPS_USER="username@example.com" ``` + Azure DevOps username of API user. ### `--azuredevops-webhook-password` + ```bash atlantis server --azuredevops-webhook-password="password123" # or (recommended) ATLANTIS_AZUREDEVOPS_WEBHOOK_PASSWORD="password123" ``` + Azure DevOps basic authentication password for inbound webhooks (see [docs](https://docs.microsoft.com/en-us/azure/devops/service-hooks/authorize?view=azure-devops)). @@ -258,45 +289,55 @@ and set `--autoplan-modules` to `false`. 
::: ### `--azuredevops-webhook-user` + ```bash atlantis server --azuredevops-webhook-user="username@example.com" # or ATLANTIS_AZUREDEVOPS_WEBHOOK_USER="username@example.com" ``` + Azure DevOps basic authentication username for inbound webhooks. ### `--bitbucket-base-url` + ```bash atlantis server --bitbucket-base-url="http://bitbucket.corp:7990/basepath" # or ATLANTIS_BITBUCKET_BASE_URL="http://bitbucket.corp:7990/basepath" ``` + Base URL of Bitbucket Server (aka Stash) installation. Must include `http://` or `https://`. If using Bitbucket Cloud (bitbucket.org), do not set. Defaults to `https://api.bitbucket.org`. ### `--bitbucket-token` + ```bash atlantis server --bitbucket-token="token" # or (recommended) ATLANTIS_BITBUCKET_TOKEN="token" ``` + Bitbucket app password of API user. ### `--bitbucket-user` + ```bash atlantis server --bitbucket-user="myuser" # or ATLANTIS_BITBUCKET_USER="myuser" ``` + Bitbucket username of API user. ### `--bitbucket-webhook-secret` + ```bash atlantis server --bitbucket-webhook-secret="secret" # or (recommended) ATLANTIS_BITBUCKET_WEBHOOK_SECRET="secret" ``` + Secret used to validate Bitbucket webhooks. ::: warning SECURITY WARNING @@ -305,143 +346,173 @@ and set `--autoplan-modules` to `false`. ::: ### `--checkout-depth` + ```bash atlantis server --checkout-depth=0 # or ATLANTIS_CHECKOUT_DEPTH=0 ``` + The number of commits to fetch from the branch. Used if `--checkout-strategy=merge` since the `--checkout-strategy=branch` (default) checkout strategy always defaults to a shallow clone using a depth of 1. - Defaults to `0`. See [Checkout Strategy](checkout-strategy.html) for more details. + Defaults to `0`. See [Checkout Strategy](checkout-strategy.md) for more details. ### `--checkout-strategy` + ```bash atlantis server --checkout-strategy="<branch|merge>" # or ATLANTIS_CHECKOUT_STRATEGY="<branch|merge>" ``` + How to check out pull requests. Use either `branch` or `merge`. - Defaults to `branch`. See [Checkout Strategy](checkout-strategy.html) for more details. + Defaults to `branch`. See [Checkout Strategy](checkout-strategy.md) for more details. ### `--config` + ```bash atlantis server --config="my/config/file.yaml" # or ATLANTIS_CONFIG="my/config/file.yaml" ``` + YAML config file where flags can also be set. See [Config File](#config-file) for more details. ### `--data-dir` + ```bash atlantis server --data-dir="path/to/data/dir" # or ATLANTIS_DATA_DIR="path/to/data/dir" ``` + Directory where Atlantis will store its data. Will be created if it doesn't exist. Defaults to `~/.atlantis`. Atlantis will store its database, checked out repos, Terraform plans and downloaded - Terraform binaries here. If Atlantis loses this directory, [locks](locking.html) + Terraform binaries here. If Atlantis loses this directory, [locks](locking.md) will be lost and unapplied plans will be lost. Note that the atlantis user is restricted to `~/.atlantis`. If you set the `--data-dir` flag to a path outside of Atlantis its home directory, ensure that you grant the atlantis user the correct permissions. ### `--default-tf-version` + ```bash atlantis server --default-tf-version="v0.12.31" # or ATLANTIS_DEFAULT_TF_VERSION="v0.12.31" ``` + Terraform version to default to. Will download to `<data-dir>/bin/terraform<version>` - if not in `PATH`. See [Terraform Versions](terraform-versions.html) for more details. + if not in `PATH`. See [Terraform Versions](terraform-versions.md) for more details. 
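Putting a few of the preceding flags together (values are illustrative), a server might pin a default Terraform version and use merge checkouts with a deeper fetch:

```bash
atlantis server \
  --checkout-strategy=merge \
  --checkout-depth=20 \
  --default-tf-version="v1.7.4"
```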
### `--disable-apply-all` + ```bash atlantis server --disable-apply-all # or ATLANTIS_DISABLE_APPLY_ALL=true ``` + Disable `atlantis apply` command so a specific project/workspace/directory has to be specified for applies. ### `--disable-autoplan` + ```bash atlantis server --disable-autoplan # or ATLANTIS_DISABLE_AUTOPLAN=true ``` + Disable atlantis auto planning. ### `--disable-autoplan-label` + ```bash atlantis server --disable-autoplan-label="no-autoplan" # or ATLANTIS_DISABLE_AUTOPLAN_LABEL="no-autoplan" ``` + Disable atlantis auto planning only on pull requests with the specified label. If `disable-autoplan` property is `true`, this flag has no effect. ### `--disable-markdown-folding` + ```bash atlantis server --disable-markdown-folding # or - ATLANTIS_DISABLE_MARKDOWN_FOLDER=true + ATLANTIS_DISABLE_MARKDOWN_FOLDING=true ``` + Disable folding in markdown output using the `<details>` html tag. ### `--disable-repo-locking` + ```bash atlantis server --disable-repo-locking # or ATLANTIS_DISABLE_REPO_LOCKING=true ``` + Stops atlantis from locking projects and or workspaces when running terraform. ### `--disable-unlock-label` + ```bash atlantis server --disable-unlock-label do-not-unlock # or ATLANTIS_DISABLE_UNLOCK_LABEL="do-not-unlock" ``` + Stops atlantis from unlocking a pull request with this label. Defaults to "" (feature disabled). ### `--emoji-reaction` + ```bash atlantis server --emoji-reaction thumbsup # or ATLANTIS_EMOJI_REACTION=thumbsup ``` - The emoji reaction to use for marking processed comments. Currently supported on Azure DevOps, GitHub and GitLab. - Defaults to `eyes`. + + The emoji reaction to use for marking processed comments. Currently supported on Azure DevOps, GitHub and GitLab. If not specified, Atlantis will not use an emoji reaction. + Defaults to "" (empty string). ### `--enable-diff-markdown-format` + ```bash atlantis server --enable-diff-markdown-format # or ATLANTIS_ENABLE_DIFF_MARKDOWN_FORMAT=true ``` + Enable Atlantis to format Terraform plan output into a markdown-diff friendly format for color-coding purposes. Useful to enable for use with GitHub. ### `--enable-policy-checks` + ```bash atlantis server --enable-policy-checks # or ATLANTIS_ENABLE_POLICY_CHECKS=true ``` - Enables atlantis to run server side policies on the result of a terraform plan. Policies are defined in [server side repo config](https://www.runatlantis.io/docs/server-side-repo-config.html#reference). + + Enables atlantis to run server side policies on the result of a terraform plan. Policies are defined in [server side repo config](server-side-repo-config.md#reference). ### `--enable-regexp-cmd` + ```bash atlantis server --enable-regexp-cmd # or ATLANTIS_ENABLE_REGEXP_CMD=true ``` + Enable Atlantis to use regular expressions to run plan/apply commands against defined project names when `-p` flag is passed with it. This can be used to run all defined projects (with the `name` key) in `atlantis.yaml` using `atlantis plan -p .*`. - The flag will only allow the regexes listed in the [`allowed_regexp_prefixes`](https://www.runatlantis.io/docs/repo-level-atlantis-yaml.html#reference) key defined in the repo `atlantis.yaml` file. If the key is undefined, its value defaults to `[]` which will allow any regex. + The flag will only allow the regexes listed in the [`allowed_regexp_prefixes`](repo-level-atlantis-yaml.md#reference) key defined in the repo `atlantis.yaml` file. If the key is undefined, its value defaults to `[]` which will allow any regex. 
This will not work with `-d` yet and to use `-p` the repo projects must be defined in the repo `atlantis.yaml` file. @@ -453,16 +524,19 @@ and set `--autoplan-modules` to `false`. ::: ### `--executable-name` + ```bash atlantis server --executable-name="atlantis" # or ATLANTIS_EXECUTABLE_NAME="atlantis" ``` + Comment command trigger executable name. Defaults to `atlantis`. This is useful when running multiple Atlantis servers against a single repository. ### `--fail-on-pre-workflow-hook-error` + ```bash atlantis server --fail-on-pre-workflow-hook-error # or @@ -471,32 +545,96 @@ and set `--autoplan-modules` to `false`. Fail and do not run the requested Atlantis command if any of the pre workflow hooks error. +### `--gitea-base-url` + + ```bash + atlantis server --gitea-base-url="http://your-gitea.corp:7990/basepath" + # or + ATLANTIS_GITEA_BASE_URL="http://your-gitea.corp:7990/basepath" + ``` + + Base URL of Gitea installation. Must include `http://` or `https://`. Defaults to `https://gitea.com` if left empty/absent. + +### `--gitea-token` + + ```bash + atlantis server --gitea-token="token" + # or (recommended) + ATLANTIS_GITEA_TOKEN="token" + ``` + + Gitea app password of API user. + +### `--gitea-user` + + ```bash + atlantis server --gitea-user="myuser" + # or + ATLANTIS_GITEA_USER="myuser" + ``` + + Gitea username of API user. + +### `--gitea-webhook-secret` + + ```bash + atlantis server --gitea-webhook-secret="secret" + # or (recommended) + ATLANTIS_GITEA_WEBHOOK_SECRET="secret" + ``` + + Secret used to validate Gitea webhooks. + + ::: warning SECURITY WARNING + If not specified, Atlantis won't be able to validate that the incoming webhook call came from Gitea. + This means that an attacker could spoof calls to Atlantis and cause it to perform malicious actions. + ::: + +### `--gitea-page-size` + + ```bash + atlantis server --gitea-page-size=30 + # or (recommended) + ATLANTIS_GITEA_PAGE_SIZE=30 + ``` + + Number of items on a single page in Gitea paged responses. + + ::: warning Configuration dependent + The default value conforms to the Gitea server's standard config setting: DEFAULT_PAGING_NUM + The highest valid value depends on the Gitea server's config setting: MAX_RESPONSE_ITEMS + ::: + ### `--gh-allow-mergeable-bypass-apply` + ```bash atlantis server --gh-allow-mergeable-bypass-apply # or ATLANTIS_GH_ALLOW_MERGEABLE_BYPASS_APPLY=true ``` + Feature flag to enable ability to use `mergeable` mode with required apply status check. ### `--gh-app-id` + ```bash atlantis server --gh-app-id="00000" # or ATLANTIS_GH_APP_ID="00000" ``` + GitHub app ID. If set, GitHub authentication will be performed as [an installation](https://docs.github.com/en/rest/apps/installations). ::: tip A GitHub app can be created by starting Atlantis first, then pointing your browser at - ``` + ```shell $(hostname)/github-app/setup ``` You'll be redirected to GitHub to create a new app, and will then be redirected to - ``` + ```shell $(hostname)/github-app/exchange-code?code=some-code ``` @@ -504,11 +642,13 @@ and set `--autoplan-modules` to `false`. ::: ### `--gh-app-key` + ```bash atlantis server --gh-app-key="-----BEGIN RSA PRIVATE KEY-----(...)" # or ATLANTIS_GH_APP_KEY="-----BEGIN RSA PRIVATE KEY-----(...)" ``` + The PEM encoded private key for the GitHub App. ::: warning SECURITY WARNING @@ -516,44 +656,69 @@ and set `--autoplan-modules` to `false`. 
 :::

### `--gh-app-key-file`
+
  ```bash
  atlantis server --gh-app-key-file="path/to/app-key.pem"
  # or
  ATLANTIS_GH_APP_KEY_FILE="path/to/app-key.pem"
  ```
+
  Path to a GitHub App PEM encoded private key file. If set, GitHub authentication will be performed as [an installation](https://docs.github.com/en/rest/apps/installations).

### `--gh-app-slug`
+
  ```bash
  atlantis server --gh-app-slug="myappslug"
  # or
  ATLANTIS_GH_APP_SLUG="myappslug"
  ```
+
  A slugged version of the GitHub app name shown in pull request comments, etc. (not `Atlantis App` but something like `atlantis-app`).
  Atlantis uses the value of this parameter to identify the comments it has left on GitHub pull requests. This is used for functions such as `--hide-prev-plan-comments`.
  You need to obtain this value from your GitHub app; one way is to go to your App settings and open "Public page" from the left sidebar.
  Your `--gh-app-slug` value will be the last part of the URL, e.g. `https://github.com/apps/<slug>`.

### `--gh-hostname`
+
  ```bash
  atlantis server --gh-hostname="my.github.enterprise.com"
  # or
  ATLANTIS_GH_HOSTNAME="my.github.enterprise.com"
  ```
+
  Hostname of your GitHub Enterprise installation. If using [GitHub.com](https://github.com),
  don't set. Defaults to `github.com`.

+### `--gh-app-installation-id`
+
+  ```bash
+  atlantis server --gh-app-installation-id="123"
+  # or
+  ATLANTIS_GH_APP_INSTALLATION_ID="123"
+  ```
+
+The installation ID of a specific instance of a GitHub application. Normally this value is
+derived by querying GitHub for the list of installations of the app ID supplied via `--gh-app-id`;
+the first installation found is used, and multiple installations result in an error. Use this flag
+if you have multiple instances of Atlantis but want to use a single already-installed GitHub app
+for all of them. You would normally do this if you are running a proxy as your single GitHub
+application that proxies to an appropriate Atlantis instance based on the organization or user
+that triggered the webhook.
+
 ### `--gh-org`
+
  ```bash
  atlantis server --gh-org="myorgname"
  # or
  ATLANTIS_GH_ORG="myorgname"
  ```
+
  GitHub organization name. Set to enable creating a private GitHub app for this organization.

### `--gh-team-allowlist`
+
  ```bash
  atlantis server --gh-team-allowlist="myteam:plan, secteam:apply, DevOps Team:apply, DevOps Team:import"
  # or
  ATLANTIS_GH_TEAM_ALLOWLIST="myteam:plan, secteam:apply, DevOps Team:apply, DevOps Team:import"
  ```
+
  In versions v0.21.0 and later, the GitHub team name can be a name or a slug.
  In versions v0.20.1 and below, the GitHub team name had to be the case-sensitive team name.
@@ -568,28 +733,44 @@ and set `--autoplan-modules` to `false`.
 :::

### `--gh-token`
+
  ```bash
  atlantis server --gh-token="token"
  # or (recommended)
  ATLANTIS_GH_TOKEN="token"
  ```
+
  GitHub token of API user.

+### `--gh-token-file`
+
+  ```bash
+  atlantis server --gh-token-file="/path/to/token"
+  # or
+  ATLANTIS_GH_TOKEN_FILE="/path/to/token"
+  ```
+
+  GitHub token of API user. The token is loaded from disk regularly to allow for rotation of the token without the need to restart the Atlantis server.
+
### `--gh-user`
+
  ```bash
  atlantis server --gh-user="myuser"
  # or
  ATLANTIS_GH_USER="myuser"
  ```
+
  GitHub username of API user.
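+
+  For GitHub App authentication, the app flags above are typically set together. A minimal sketch (the ID, key path, slug and allowlist are placeholders):
+
+  ```bash
+  atlantis server \
+    --gh-app-id="00000" \
+    --gh-app-key-file="path/to/app-key.pem" \
+    --gh-app-slug="atlantis-app" \
+    --repo-allowlist='github.com/myorg/*'
+  ```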
### `--gh-webhook-secret` + ```bash atlantis server --gh-webhook-secret="secret" # or (recommended) ATLANTIS_GH_WEBHOOK_SECRET="secret" ``` - Secret used to validate GitHub webhooks (see [https://developer.github.com/webhooks/securing/](https://docs.github.com/en/developers/webhooks-and-events/webhooks/securing-your-webhooks)). + + Secret used to validate GitHub webhooks (see [GitHub: Validating webhook deliveries](https://docs.github.com/en/webhooks/using-webhooks/validating-webhook-deliveries)). ::: warning SECURITY WARNING If not specified, Atlantis won't be able to validate that the incoming webhook call came from GitHub. @@ -597,36 +778,44 @@ and set `--autoplan-modules` to `false`. ::: ### `--gitlab-hostname` + ```bash atlantis server --gitlab-hostname="my.gitlab.enterprise.com" # or ATLANTIS_GITLAB_HOSTNAME="my.gitlab.enterprise.com" ``` + Hostname of your GitLab Enterprise installation. If using [Gitlab.com](https://gitlab.com), don't set. Defaults to `gitlab.com`. ### `--gitlab-token` + ```bash atlantis server --gitlab-token="token" # or (recommended) ATLANTIS_GITLAB_TOKEN="token" ``` + GitLab token of API user. ### `--gitlab-user` + ```bash atlantis server --gitlab-user="myuser" # or ATLANTIS_GITLAB_USER="myuser" ``` + GitLab username of API user. ### `--gitlab-webhook-secret` + ```bash atlantis server --gitlab-webhook-secret="secret" # or (recommended) ATLANTIS_GITLAB_WEBHOOK_SECRET="secret" ``` + Secret used to validate GitLab webhooks. ::: warning SECURITY WARNING @@ -635,71 +824,100 @@ and set `--autoplan-modules` to `false`. ::: ### `--help` + ```bash atlantis server --help ``` + View help. ### `--hide-prev-plan-comments` + ```bash atlantis server --hide-prev-plan-comments # or ATLANTIS_HIDE_PREV_PLAN_COMMENTS=true ``` + Hide previous plan comments to declutter PRs. This is only supported in GitHub and GitLab currently. This is not enabled by default. When using Github App, you need to set `--gh-app-slug` to enable this feature. ### `--hide-unchanged-plan-comments` + ```bash atlantis server --hide-unchanged-plan-comments # or ATLANTIS_HIDE_UNCHANGED_PLAN_COMMENTS=true ``` + Remove no-changes plan comments from the pull request. This is useful when you have many projects and want to keep the pull request clean from useless comments. ### `--include-git-untracked-files` + ```bash atlantis server --include-git-untracked-files # or ATLANTIS_INCLUDE_GIT_UNTRACKED_FILES=true ``` + Include git untracked files in the Atlantis modified file list. Used for example with CDKTF pre-workflow hooks that dynamically generate Terraform files. +### `--ignore-vcs-status-names` + + ```bash + atlantis server --ignore-vcs-status-names="status1,status2" + # or + ATLANTIS_IGNORE_VCS_STATUS_NAMES=status1,status2 + ``` + + Comma separated list of VCS status names from other atlantis services. + When `gh-allow-mergeable-bypass-apply` is true, will ignore status checks + (e.g. `status1/plan`, `status1/apply`, `status2/plan`, `status2/apply`) + from other Atlantis services when checking if the PR is mergeable. + Currently only implemented for GitHub. + ### `--locking-db-type` + ```bash atlantis server --locking-db-type="<boltdb|redis>" # or ATLANTIS_LOCKING_DB_TYPE="<boltdb|redis>" ``` + The locking database type to use for storing plan and apply locks. Defaults to `boltdb`. Notes: - * If set to `boltdb`, only one process may have access to the boltdb instance. - * If set to `redis`, then `--redis-host`, `--redis-port`, and `--redis-password` must be set. 
+
+* If set to `boltdb`, only one process may have access to the boltdb instance.
+* If set to `redis`, then `--redis-host`, `--redis-port`, and `--redis-password` must be set.

### `--log-level`
+
  ```bash
  atlantis server --log-level="<debug|info|warn|error>"
  # or
  ATLANTIS_LOG_LEVEL="<debug|info|warn|error>"
  ```
+
  Log level. Defaults to `info`.

### `--markdown-template-overrides-dir`
+
  ```bash
  atlantis server --markdown-template-overrides-dir="path/to/templates/"
  # or
  ATLANTIS_MARKDOWN_TEMPLATE_OVERRIDES_DIR="path/to/templates/"
  ```
+
  This will be available in v0.21.0.

  Directory where Atlantis will read in overrides for markdown templates used to render comments on pull requests.
  Markdown template overrides may be specified either in individual files, or all together in a single file. All template
- override files _must_ have the `.tmpl` extension, otherwise they will not be parsed.
+ override files *must* have the `.tmpl` extension, otherwise they will not be parsed.

  Markdown templates which may have overrides can be found [here](https://github.com/runatlantis/atlantis/tree/main/server/events/templates)

@@ -708,68 +926,94 @@ This is useful when you have many projects and want to keep the pull request cle
  Defaults to the `.markdown_templates` directory in the Atlantis home directory, i.e. `/home/atlantis/.markdown_templates/`.

+### `--max-comments-per-command`
+
+  ```bash
+  atlantis server --max-comments-per-command=100
+  # or
+  ATLANTIS_MAX_COMMENTS_PER_COMMAND=100
+  ```
+
+  Limit the number of comments published after a command is executed, to prevent spamming your VCS and getting Atlantis throttled as a result. Defaults to `100`. Set this option to `0` to disable log truncation. Note that truncation happens at the top of the command output, to preserve the most important parts, which are often displayed at the end.
+
### `--parallel-apply`
+
  ```bash
  atlantis server --parallel-apply
  # or
  ATLANTIS_PARALLEL_APPLY=true
  ```
- Whether to run apply operations in parallel. Defaults to `false`. Explicit declaration in [repo config](repo-level-atlantis-yaml.html#run-plans-and-applies-in-parallel) takes precedence.
+
+ Whether to run apply operations in parallel. Defaults to `false`. Explicit declaration in [repo config](repo-level-atlantis-yaml.md#run-plans-and-applies-in-parallel) takes precedence.

### `--parallel-plan`
+
  ```bash
  atlantis server --parallel-plan
  # or
  ATLANTIS_PARALLEL_PLAN=true
  ```
- Whether to run plan operations in parallel. Defaults to `false`. Explicit declaration in [repo config](repo-level-atlantis-yaml.html#run-plans-and-applies-in-parallel) takes precedence.
+
+ Whether to run plan operations in parallel. Defaults to `false`. Explicit declaration in [repo config](repo-level-atlantis-yaml.md#run-plans-and-applies-in-parallel) takes precedence.

### `--parallel-pool-size`
+
  ```bash
  atlantis server --parallel-pool-size=100
  # or
  ATLANTIS_PARALLEL_POOL_SIZE=100
  ```
+
  Max size of the wait group that runs parallel plans and applies (if enabled). Defaults to `15`.

### `--port`
+
  ```bash
  atlantis server --port=4141
  # or
  ATLANTIS_PORT=4141
  ```
+
  Port to bind to. Defaults to `4141`.

### `--quiet-policy-checks`
+
  ```bash
  atlantis server --quiet-policy-checks
  # or
  ATLANTIS_QUIET_POLICY_CHECKS=true
  ```
+
  Exclude policy check comments from pull requests unless there's an actual error from conftest. This also excludes warnings. Defaults to `false`.
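+
+  The `redis` locking backend described under `--locking-db-type` above is configured with the `--redis-*` flags documented below. A combined sketch with placeholder values:
+
+  ```bash
+  atlantis server \
+    --locking-db-type="redis" \
+    --redis-host="localhost" \
+    --redis-port=6379 \
+    --redis-password="password123"
+  ```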
### `--redis-db` + ```bash atlantis server --redis-db=0 # or ATLANTIS_REDIS_DB=0 ``` + The Redis Database to use when using a Locking DB type of `redis`. Defaults to `0`. ### `--redis-host` + ```bash atlantis server --redis-host="localhost" # or ATLANTIS_REDIS_HOST="localhost" ``` + The Redis Hostname for when using a Locking DB type of `redis`. ### `--redis-insecure-skip-verify` + ```bash atlantis server --redis-insecure-skip-verify=false # or ATLANTIS_REDIS_INSECURE_SKIP_VERIFY=false ``` + Controls whether the Redis client verifies the Redis server's certificate chain and host name. If true, accepts any certificate presented by the server and any host name in that certificate. Defaults to `false`. ::: warning SECURITY WARNING @@ -777,82 +1021,97 @@ This is useful when you have many projects and want to keep the pull request cle ::: ### `--redis-password` + ```bash atlantis server --redis-password="password123" # or (recommended) ATLANTIS_REDIS_PASSWORD="password123" ``` + The Redis Password for when using a Locking DB type of `redis`. ### `--redis-port` + ```bash atlantis server --redis-port=6379 # or ATLANTIS_REDIS_PORT=6379 ``` + The Redis Port for when using a Locking DB type of `redis`. Defaults to `6379`. ### `--redis-tls-enabled` + ```bash atlantis server --redis-tls-enabled=false # or ATLANTIS_REDIS_TLS_ENABLED=false ``` + Enables a TLS connection, with min version of 1.2, to Redis when using a Locking DB type of `redis`. Defaults to `false`. ### `--repo-allowlist` + ```bash # NOTE: Use single quotes to avoid shell expansion of *. atlantis server --repo-allowlist='github.com/myorg/*' # or ATLANTIS_REPO_ALLOWLIST='github.com/myorg/*' ``` + Atlantis requires you to specify an allowlist of repositories it will accept webhooks from. Notes: - * Accepts a comma separated list, ex. `definition1,definition2` - * Format is `{hostname}/{owner}/{repo}`, ex. `github.com/runatlantis/atlantis` - * `*` matches any characters, ex. `github.com/runatlantis/*` will match all repos in the runatlantis organization - * An entry beginning with `!` negates it, ex. `github.com/foo/*,!github.com/foo/bar` will match all github repos in the `foo` owner *except* `bar`. - * For Bitbucket Server: `{hostname}` is the domain without scheme and port, `{owner}` is the name of the project (not the key), and `{repo}` is the repo name - * User (not project) repositories take on the format: `{hostname}/{full name}/{repo}` (e.g., `bitbucket.example.com/Jane Doe/myatlantis` for username `jdoe` and full name `Jane Doe`, which is not very intuitive) - * For Azure DevOps the allowlist takes one of two forms: `{owner}.visualstudio.com/{project}/{repo}` or `dev.azure.com/{owner}/{project}/{repo}` - * Microsoft is in the process of changing Azure DevOps to the latter form, so it may be safest to always specify both formats in your repo allowlist for each repository until the change is complete. + +* Accepts a comma separated list, ex. `definition1,definition2` +* Format is `{hostname}/{owner}/{repo}`, ex. `github.com/runatlantis/atlantis` +* `*` matches any characters, ex. `github.com/runatlantis/*` will match all repos in the runatlantis organization +* An entry beginning with `!` negates it, ex. `github.com/foo/*,!github.com/foo/bar` will match all github repos in the `foo` owner *except* `bar`. 
+* For Bitbucket Server: `{hostname}` is the domain without scheme and port, `{owner}` is the name of the project (not the key), and `{repo}` is the repo name + * User (not project) repositories take on the format: `{hostname}/{full name}/{repo}` (e.g., `bitbucket.example.com/Jane Doe/myatlantis` for username `jdoe` and full name `Jane Doe`, which is not very intuitive) +* For Azure DevOps the allowlist takes one of two forms: `{owner}.visualstudio.com/{project}/{repo}` or `dev.azure.com/{owner}/{project}/{repo}` +* Microsoft is in the process of changing Azure DevOps to the latter form, so it may be safest to always specify both formats in your repo allowlist for each repository until the change is complete. Examples: - * Allowlist `myorg/repo1` and `myorg/repo2` on `github.com` - * `--repo-allowlist=github.com/myorg/repo1,github.com/myorg/repo2` - * Allowlist all repos under `myorg` on `github.com` - * `--repo-allowlist='github.com/myorg/*'` - * Allowlist all repos under `myorg` on `github.com`, excluding `myorg/untrusted-repo` - * `--repo-allowlist='github.com/myorg/*,!github.com/myorg/untrusted-repo'` - * Allowlist all repos in my GitHub Enterprise installation - * `--repo-allowlist='github.yourcompany.com/*'` - * Allowlist all repos under `myorg` project `myproject` on Azure DevOps - * `--repo-allowlist='myorg.visualstudio.com/myproject/*,dev.azure.com/myorg/myproject/*'` - * Allowlist all repositories - * `--repo-allowlist='*'` + +* Allowlist `myorg/repo1` and `myorg/repo2` on `github.com` + * `--repo-allowlist=github.com/myorg/repo1,github.com/myorg/repo2` +* Allowlist all repos under `myorg` on `github.com` + * `--repo-allowlist='github.com/myorg/*'` +* Allowlist all repos under `myorg` on `github.com`, excluding `myorg/untrusted-repo` + * `--repo-allowlist='github.com/myorg/*,!github.com/myorg/untrusted-repo'` +* Allowlist all repos in my GitHub Enterprise installation + * `--repo-allowlist='github.yourcompany.com/*'` +* Allowlist all repos under `myorg` project `myproject` on Azure DevOps + * `--repo-allowlist='myorg.visualstudio.com/myproject/*,dev.azure.com/myorg/myproject/*'` +* Allowlist all repositories + * `--repo-allowlist='*'` ### `--repo-config` + ```bash atlantis server --repo-config="path/to/repos.yaml" # or ATLANTIS_REPO_CONFIG="path/to/repos.yaml" ``` - Path to a YAML server-side repo config file. See [Server Side Repo Config](server-side-repo-config.html). + + Path to a YAML server-side repo config file. See [Server Side Repo Config](server-side-repo-config.md). ### `--repo-config-json` + ```bash atlantis server --repo-config-json='{"repos":[{"id":"/.*/", "apply_requirements":["mergeable"]}]}' # or ATLANTIS_REPO_CONFIG_JSON='{"repos":[{"id":"/.*/", "apply_requirements":["mergeable"]}]}' ``` + Specify server-side repo config as a JSON string. Useful if you don't want to write a config file to disk. - See [Server Side Repo Config](server-side-repo-config.html) for more details. + See [Server Side Repo Config](server-side-repo-config.md) for more details. 
 ::: tip
-  If specifying a [Workflow](custom-workflows.html#reference), [step](custom-workflows.html#step)'s
+  If specifying a [Workflow](custom-workflows.md#reference), [steps](custom-workflows.md#step)
  can be specified as follows:
+
  ```json
  {
    "repos": [],
@@ -875,25 +1134,30 @@ This is useful when you have many projects and want to keep the pull request cle
      }
    }
  ```
+
 :::

### `--restrict-file-list`
+
  ```bash
  atlantis server --restrict-file-list
  # or (recommended)
  ATLANTIS_RESTRICT_FILE_LIST=true
  ```
+
  `--restrict-file-list` will block plan requests from projects outside the files modified in the pull request.
  This will not block plan requests that use regex if the `--enable-regexp-cmd` flag is set; in these cases,
  commands like `atlantis plan -p .*` will still work. Normal commands will still be blocked if necessary. Defaults to `false`.

### `--silence-allowlist-errors`
+
  ```bash
  atlantis server --silence-allowlist-errors
  # or
  ATLANTIS_SILENCE_ALLOWLIST_ERRORS=true
  ```
+
  Some users use the `--repo-allowlist` flag to control which repos Atlantis
  responds to. Normally, if Atlantis receives a pull request webhook from a repo not listed
  in the allowlist, it will comment back with an error. This flag disables that commenting.
@@ -902,20 +1166,24 @@ This is useful when you have many projects and want to keep the pull request cle
  at an organization level rather than on each repo.

### `--silence-fork-pr-errors`
+
  ```bash
  atlantis server --silence-fork-pr-errors
  # or
  ATLANTIS_SILENCE_FORK_PR_ERRORS=true
  ```
+
  Normally, if Atlantis receives a pull request webhook from a fork and `--allow-fork-prs` is not set,
  it will comment back with an error. This flag disables that commenting.

### `--silence-no-projects`
+
  ```bash
  atlantis server --silence-no-projects
  # or
  ATLANTIS_SILENCE_NO_PROJECTS=true
  ```
+
  `--silence-no-projects` will tell Atlantis to ignore PRs if none of the modified files are part of a project defined in the `atlantis.yaml` file.
  This flag ensures an Atlantis server only responds to its explicitly declared projects.
  This has no effect if projects are undefined in the repo level `atlantis.yaml`.

  This is useful when running multiple Atlantis servers against a single repository so you can
  delegate work to each Atlantis server. Also useful when used with pre_workflow_hooks to dynamically generate an `atlantis.yaml` file.
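+
+  As a sketch of the multi-server pattern described above, each server can be given its own trigger name and repo config, and silenced on repos it doesn't own (the names and file paths are placeholders):
+
+  ```bash
+  # One server per team, each with its own trigger word and server-side repo config.
+  atlantis server --executable-name="atlantis-infra" \
+    --repo-config="repos-infra.yaml" --silence-no-projects
+  atlantis server --executable-name="atlantis-network" \
+    --repo-config="repos-network.yaml" --silence-no-projects
+  ```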
Slack is not fully supported. TODO: Slack docs. + + API token for Slack notifications. See [Using Slack hooks](using-slack-hooks.md). ### `--ssl-cert-file` + ```bash atlantis server --ssl-cert-file="/etc/ssl/certs/my-cert.crt" # or ATLANTIS_SSL_CERT_FILE="/etc/ssl/certs/my-cert.crt" ``` + File containing x509 Certificate used for serving HTTPS. If the cert is signed by a CA, the file should be the concatenation of the server's certificate, any intermediates, and the CA's certificate. ### `--ssl-key-file` + ```bash atlantis server --ssl-key-file="/etc/ssl/private/my-cert.key" # or ATLANTIS_SSL_KEY_FILE="/etc/ssl/private/my-cert.key" ``` + File containing x509 private key matching `--ssl-cert-file`. ### `--stats-namespace` + ```bash atlantis server --stats-namespace="myatlantis" # or ATLANTIS_STATS_NAMESPACE="myatlantis" ``` - Namespace for emitting stats/metrics. See [stats](stats.html) section. + + Namespace for emitting stats/metrics. See [stats](stats.md) section. + +### `--tf-distribution` + + ```bash + atlantis server --tf-distribution="terraform" + # or + ATLANTIS_TF_DISTRIBUTION="terraform" + ``` + + Which TF distribution to use. Can be set to `terraform` or `opentofu`. ### `--tf-download` + ```bash atlantis server --tf-download=false # or ATLANTIS_TF_DOWNLOAD=false ``` + Defaults to `true`. Allow Atlantis to list and download additional versions of Terraform. Setting this to `false` can be useful in an air-gapped environment where a download mirror is not available. ### `--tf-download-url` + ```bash atlantis server --tf-download-url="https://releases.company.com" # or ATLANTIS_TF_DOWNLOAD_URL="https://releases.company.com" ``` + An alternative URL to download Terraform versions if they are missing. Useful in an airgapped environment where releases.hashicorp.com is not available. Directory structure of the custom endpoint should match that of releases.hashicorp.com. This has no impact if `--tf-download` is set to `false`. + This setting is not yet supported when `--tf-distribution` is set to `opentofu`. + ### `--tfe-hostname` + ```bash atlantis server --tfe-hostname="my-terraform-enterprise.company.com" # or ATLANTIS_TFE_HOSTNAME="my-terraform-enterprise.company.com" ``` + Hostname of your Terraform Enterprise installation to be used in conjunction with - `--tfe-token`. See [Terraform Cloud](terraform-cloud.html) for more details. + `--tfe-token`. See [Terraform Cloud](terraform-cloud.md) for more details. If using Terraform Cloud (i.e. you don't have your own Terraform Enterprise installation) no need to set since it defaults to `app.terraform.io`. ### `--tfe-local-execution-mode` + ```bash atlantis server --tfe-local-execution-mode # or ATLANTIS_TFE_LOCAL_EXECUTION_MODE=true ``` - Enable if you're using local execution mode (instead of TFE/C's remote execution mode). See [Terraform Cloud](terraform-cloud.html) for more details. + + Enable if you're using local execution mode (instead of TFE/C's remote execution mode). See [Terraform Cloud](terraform-cloud.md) for more details. ### `--tfe-token` + ```bash atlantis server --tfe-token="xxx.atlasv1.yyy" # or (recommended) ATLANTIS_TFE_TOKEN='xxx.atlasv1.yyy' ``` - A token for Terraform Cloud/Terraform Enterprise integration. See [Terraform Cloud](terraform-cloud.html) for more details. + + A token for Terraform Cloud/Terraform Enterprise integration. See [Terraform Cloud](terraform-cloud.md) for more details. 
### `--use-tf-plugin-cache`
+
  ```bash
  atlantis server --use-tf-plugin-cache=false
  # or
  ATLANTIS_USE_TF_PLUGIN_CACHE=false
  ```
+
  Set to `false` to disable the Terraform plugin cache. This flag is useful when multiple projects
  need to run a plan and apply in the same PR, to avoid the known Terraform race condition on
  concurrent use of `plugin_cache_dir`. More info:

-- [plugin_cache_dir concurrently discussion](https://github.com/hashicorp/terraform/issues/31964)
-- [PR to improve the situation](https://github.com/hashicorp/terraform/pull/33479)
+* [plugin_cache_dir concurrently discussion](https://github.com/hashicorp/terraform/issues/31964)
+* [PR to improve the situation](https://github.com/hashicorp/terraform/pull/33479)

The effect of the race condition is more evident when plan and apply run in parallel. Disabling
the plugin cache will impact performance when starting a new plan or apply, but in large Atlantis
deployments with multiple projects and shared modules, the use of `--parallel-plan` and
`--parallel-apply` is mandatory for efficient management of the PRs.

### `--var-file-allowlist`
+
  ```bash
  atlantis server --var-file-allowlist='/path/to/tfvars/dir'
  # or
  ATLANTIS_VAR_FILE_ALLOWLIST='/path/to/tfvars/dir'
  ```
+
  Comma-separated list of additional directory paths where [variable definition files](https://developer.hashicorp.com/terraform/language/values/variables#variable-definitions-tfvars-files) can be read from.
  The paths in this argument should be absolute paths. Relative paths and globbing are currently not supported.
  If this argument is not provided, it defaults to Atlantis' data directory, determined by the `--data-dir` argument.

### `--vcs-status-name`
+
  ```bash
  atlantis server --vcs-status-name="atlantis-dev"
  # or
  ATLANTIS_VCS_STATUS_NAME="atlantis-dev"
  ```
+
  Name used to identify Atlantis when updating a pull request status. Defaults to `atlantis`.

  This is useful when running multiple Atlantis servers against a single repository so you can
  give each Atlantis server its own unique name to prevent the statuses clashing.

### `--web-basic-auth`
+
  ```bash
  atlantis server --web-basic-auth
  # or
  ATLANTIS_WEB_BASIC_AUTH=true
  ```
+
  Enable Basic Authentication on the Atlantis web service.

### `--web-password`
+
  ```bash
  atlantis server --web-password="atlantis"
  # or
  ATLANTIS_WEB_PASSWORD="atlantis"
  ```
+
  Password used for Basic Authentication on the Atlantis web service. Defaults to `atlantis`.

### `--web-username`
+
  ```bash
  atlantis server --web-username="atlantis"
  # or
  ATLANTIS_WEB_USERNAME="atlantis"
  ```
+
  Username used for Basic Authentication on the Atlantis web service. Defaults to `atlantis`.

### `--websocket-check-origin`
+
  ```bash
  atlantis server --websocket-check-origin
  # or
  ATLANTIS_WEBSOCKET_CHECK_ORIGIN=true
  ```
+
  Only allow websocket connections when they originate from the running Atlantis web server.

### `--write-git-creds`
+
  ```bash
  atlantis server --write-git-creds
  # or
  ATLANTIS_WRITE_GIT_CREDS=true
  ```
+
  Write out a .git-credentials file with the provider user and token to allow
  cloning private modules over HTTPS or SSH. See [here](https://git-scm.com/docs/git-credential-store) for more information.
@@ -1120,4 +1440,3 @@ The effect of the race condition is more evident when using parallel configurati
 ::: warning SECURITY WARNING
 This does write secrets to disk and should only be enabled in a secure environment.
 :::
-
diff --git a/runatlantis.io/docs/server-side-repo-config.md b/runatlantis.io/docs/server-side-repo-config.md
index 77b44be4fa..2469eec4d7 100644
--- a/runatlantis.io/docs/server-side-repo-config.md
+++ b/runatlantis.io/docs/server-side-repo-config.md
@@ -1,27 +1,29 @@
-# Server Side Config
+# Server Side Repo Config
+
 A Server-Side Config file is used for groups of server config that can't
 reasonably be expressed through flags. One such use case is to control
 per-repo behaviour and what users can do in repo-level `atlantis.yaml` files.

-[[toc]]
-
 ## Do I Need A Server-Side Config File?
+
 You do not need a server-side repo config file unless you want to customize
 some aspect of Atlantis on a per-repo basis. Read through the [use-cases](#use-cases)
 to determine if you need it.

 ## Enabling Server Side Config
+
 To use server side repo config create a config file, ex. `repos.yaml`, and pass it to
 the `atlantis server` command via the `--repo-config` flag, ex. `--repo-config=path/to/repos.yaml`.

 If you don't wish to write a config file to disk, you can use the
 `--repo-config-json` flag or `ATLANTIS_REPO_CONFIG_JSON` environment variable
-to specify your config as JSON. See [--repo-config-json](server-configuration.html#repo-config-json)
+to specify your config as JSON. See [--repo-config-json](server-configuration.md#repo-config-json)
 for an example.

-
 ## Example Server Side Repo
+
 ```yaml
 # repos lists the config for specific repos.
 repos:
@@ -54,9 +56,9 @@ repos:

   # allowed_overrides specifies which keys can be overridden by this repo in
   # its atlantis.yaml file.
-  allowed_overrides: [apply_requirements, workflow, delete_source_branch_on_merge, repo_locking, custom_policy_check]
+  allowed_overrides: [apply_requirements, workflow, delete_source_branch_on_merge, repo_locking, repo_locks, custom_policy_check]

-  # allowed_workflows specifies which workflows the repos that match
+  # allowed_workflows specifies which workflows the repos that match
   # are allowed to select.
   allowed_workflows: [custom]
@@ -71,18 +73,24 @@ repos:

   # repo_locking defines whether to lock the repository when planning.
   # If true (default), atlantis tries to get a lock.
+  # deprecated: use repo_locks instead
   repo_locking: true

+  # repo_locks defines whether the repository should be locked on apply instead of plan, or disabled
+  # Valid values are on_plan (default), on_apply or disabled.
+  repo_locks:
+    mode: on_plan
+
   # custom_policy_check defines whether policy checking tools besides Conftest are enabled in checks
   # If false (default), only Conftest JSON output is allowed
   custom_policy_check: false

   # pre_workflow_hooks defines arbitrary list of scripts to execute before workflow execution.
-  pre_workflow_hooks:
+  pre_workflow_hooks:
     - run: my-pre-workflow-hook-command arg1
-
+
   # post_workflow_hooks defines arbitrary list of scripts to execute after workflow execution.
-  post_workflow_hooks:
+  post_workflow_hooks:
     - run: my-post-workflow-hook-command arg1

   # policy_check defines if policy checking should be enabled on this repository.
@@ -112,13 +120,16 @@ workflows:
 ```

 ## Use Cases
+
 Here are some of the reasons you might want to use a repo config.

 ### Requiring PR Is Approved Before an Applicable Subcommand
+
 If you want to require that all (or specific) repos must have pull requests
 approved before Atlantis will allow running `apply` or `import`, use the `plan_requirements`, `apply_requirements` or `import_requirements` keys.
 For all repos:
+
 ```yaml
 # repos.yaml
 repos:
@@ -129,6 +140,7 @@ repos:
 ```

 For a specific repo:
+
 ```yaml
 # repos.yaml
 repos:
@@ -138,13 +150,15 @@ repos:
   import_requirements: [approved]
 ```

-See [Command Requirements](command-requirements.html) for more details.
+See [Command Requirements](command-requirements.md) for more details.

 ### Requiring PR Is "Mergeable" Before Apply or Import
+
 If you want to require that all (or specific) repos must have pull requests
 in a mergeable state before Atlantis will allow running `apply` or `import`, use the `plan_requirements`, `apply_requirements` or `import_requirements` keys.

 For all repos:
+
 ```yaml
 # repos.yaml
 repos:
@@ -155,6 +169,7 @@ repos:
 ```

 For a specific repo:
+
 ```yaml
 # repos.yaml
 repos:
@@ -164,13 +179,15 @@ repos:
   import_requirements: [mergeable]
 ```

-See [Command Requirements](command-requirements.html) for more details.
+See [Command Requirements](command-requirements.md) for more details.

 ### Repos Can Set Their Own Requirements For an Applicable Subcommand
+
 If you want all (or specific) repos to be able to override the default apply requirements, use
 the `allowed_overrides` key.

 To allow all repos to override the default:
+
 ```yaml
 # repos.yaml
 repos:
@@ -183,7 +200,9 @@ repos:
   # But all repos can set their own using atlantis.yaml
   allowed_overrides: [plan_requirements, apply_requirements, import_requirements]
 ```
+
 To allow only a specific repo to override the default:
+
 ```yaml
 # repos.yaml
 repos:
@@ -200,6 +219,7 @@ repos:

 Then each allowed repo can have an `atlantis.yaml` file that sets
 `plan_requirements`, `apply_requirements` or `import_requirements` to an empty array (disabling the requirement).
+
 ```yaml
 # atlantis.yaml in the repo root or set repo_config_file in repos.yaml
 version: 3
@@ -211,6 +231,7 @@ projects:
 ```

 ### Running Scripts Before Atlantis Workflows
+
 If you want to run scripts that would execute before Atlantis can run default or
 custom workflows, you can define `pre_workflow_hooks`:

@@ -222,10 +243,12 @@ repos:
     - run: |
         my bash script inline
 ```
-See [Pre Workflow Hooks](pre-workflow-hooks.html) for more details on writing
+
+See [Pre Workflow Hooks](pre-workflow-hooks.md) for more details on writing
 pre workflow hooks.

 ### Running Scripts After Atlantis Workflows
+
 If you want to run scripts that would execute after Atlantis runs default or
 custom workflows, you can define `post_workflow_hooks`:

@@ -237,15 +260,18 @@ repos:
     - run: |
         my bash script inline
 ```
-See [Post Workflow Hooks](post-workflow-hooks.html) for more details on writing
+
+See [Post Workflow Hooks](post-workflow-hooks.md) for more details on writing
 post workflow hooks.

 ### Change The Default Atlantis Workflow
+
 If you want to change the default commands that Atlantis runs during `plan` and `apply`
 phases, you can create a new `workflow`.

 If you want to use that workflow by default for all repos, use the workflow
 key `default`:
+
 ```yaml
 # repos.yaml
 # NOTE: the repos key is not required.
@@ -261,10 +287,11 @@ workflows:
       - run: my custom apply command
 ```

-See [Custom Workflows](custom-workflows.html) for more details on writing
+See [Custom Workflows](custom-workflows.md) for more details on writing
 custom workflows.
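+
+Another common variant keeps Atlantis' built-in steps but prepends a check. A sketch that fails the plan stage on unformatted code (the `fmt` check is illustrative, not required):
+
+```yaml
+# repos.yaml
+workflows:
+  default:
+    plan:
+      steps:
+      - run: terraform fmt -check=true -diff
+      - init
+      - plan
+```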
 ### Allow Repos To Choose A Server-Side Workflow
+
 If you want repos to be able to choose their own workflows that are defined
 in the server-side repo config, you need to create the workflows
 server-side and then allow each repo to override the `workflow` key:

@@ -295,7 +322,8 @@ workflows:
       steps:
       - run: another custom command
 ```
-Or, if you want to restrict what workflows each repo has access to, use the `allowed_workflows`
+
+Or, if you want to restrict what workflows each repo has access to, use the `allowed_workflows`
 key:

 ```yaml
@@ -345,13 +373,15 @@ There is always a workflow named `default` that corresponds to Atlantis' default
 unless you've created your own server-side workflow with that key (overriding it).
 :::

-See [Custom Workflows](custom-workflows.html) for more details on writing
+See [Custom Workflows](custom-workflows.md) for more details on writing
 custom workflows.

 ### Allow Using Custom Policy Tools
+
 Conftest is the standard policy check application integrated with Atlantis, but custom tools can still be run in custom workflows when the `custom_policy_check` option is set. See the [Custom Policy Checks page](custom-policy-checks.md) for detailed examples.

 ### Allow Repos To Define Their Own Workflows
+
 If you want repos to be able to define their own workflows, you need to
 allow them to override the `workflow` key and set `allow_custom_workflows` to `true`.

@@ -375,6 +405,7 @@ repos:
 ```

 Then each allowed repo can define and use a custom workflow in their `atlantis.yaml` files:
+
 ```yaml
 # atlantis.yaml
 version: 3
@@ -392,12 +423,13 @@ workflows:
       - run: my custom apply command
 ```

-See [Custom Workflows](custom-workflows.html) for more details on writing
+See [Custom Workflows](custom-workflows.md) for more details on writing
 custom workflows.

 ### Multiple Atlantis Servers Handle The Same Repository
+
 Running multiple Atlantis servers to handle the same repository can be done to separate permissions for each Atlantis server.
-In this case, a different [atlantis.yaml](repo-level-atlantis-yaml.html) repository config file can be used by using different `repos.yaml` files.
+In this case, a different [atlantis.yaml](repo-level-atlantis-yaml.md) repository config file can be used per server by supplying different `repos.yaml` files.

 For example, consider a situation where a separate `production-server` atlantis
 uses repo config `atlantis-production.yaml` and `staging-server` atlantis uses repo config `atlantis-staging.yaml`.

@@ -416,7 +448,7 @@ repos:
 ```

 Then, create `atlantis-production.yaml` and `atlantis-staging.yaml` files in the repository.
-See the configuration examples in [atlantis.yaml](repo-level-atlantis-yaml.html).
+See the configuration examples in [atlantis.yaml](repo-level-atlantis-yaml.md).

 ```yaml
 # atlantis-production.yaml
@@ -438,25 +470,30 @@ Now, 2 webhook URLs can be setup for the repository, which send events to `produ
 Each server handles a different repository config file.

 :::tip Notes
-* If `no projects` comments are annoying, set [--silence-no-projects](server-configuration.html#silence-no-projects).
-* The command trigger executable name can be reconfigured from `atlantis` to something else by setting [Executable Name](server-configuration.html#executable-name).
+
+* If `no projects` comments are annoying, set [--silence-no-projects](server-configuration.md#silence-no-projects).
+* The command trigger executable name can be reconfigured from `atlantis` to something else by setting [Executable Name](server-configuration.md#executable-name).
* When using different atlantis server vcs users such as `@atlantis-staging`, the comment `@atlantis-staging plan` can be used instead `atlantis plan` to call `staging-server` only. ::: ## Reference ### Top-Level Keys -| Key | Type | Default | Required | Description | -|-----------|---------------------------------------------------------|-----------|----------|---------------------------------------------------------------------------------------| -| repos | array[[Repo](#repo)] | see below | no | List of repos to apply settings to. | -| workflows | map[string: [Workflow](custom-workflows.html#workflow)] | see below | no | Map from workflow name to workflow. Workflows override the default Atlantis commands. | -| policies | Policies. | none | no | List of policy sets to run and associated metadata | -| metrics | Metrics. | none | no | Map of metric configuration | +| Key | Type | Default | Required | Description | +|------------|-------------------------------------------------------|-----------|----------|---------------------------------------------------------------------------------------| +| repos | array[[Repo](#repo)] | see below | no | List of repos to apply settings to. | +| workflows | map[string: [Workflow](custom-workflows.md#workflow)] | see below | no | Map from workflow name to workflow. Workflows override the default Atlantis commands. | +| policies | Policies. | none | no | List of policy sets to run and associated metadata | +| metrics | Metrics. | none | no | Map of metric configuration | +| team_authz | [TeamAuthz](#teamauthz) | none | no | Configuration of team permission checking | ::: tip A Note On Defaults + #### `repos` + `repos` always contains a first element with the Atlantis default config: + ```yaml repos: - id: /.*/ @@ -470,7 +507,9 @@ repos: ``` #### `workflows` + `workflows` always contains the Atlantis default workflow under the key `default`: + ```yaml workflows: default: @@ -485,29 +524,33 @@ If you set a workflow with the key `default`, it will override this. ::: ### Repo -| Key | Type | Default | Required | Description | -|-------------------------------|----------|---------|----------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| id | string | none | yes | Value can be a regular expression when specified as /<regex>/ or an exact string match. Repo IDs are of the form `{vcs hostname}/{org}/{name}`, ex. `github.com/owner/repo`. Hostname is specified without scheme or port. For Bitbucket Server, {org} is the **name** of the project, not the key. | -| branch | string | none | no | An regex matching pull requests by base branch (the branch the pull request is getting merged into). By default, all branches are matched | -| repo_config_file | string | none | no | Repo config file path in this repo. By default, use `atlantis.yaml` which is located on repository root. When multiple atlantis servers work with the same repo, please set different file names. | -| workflow | string | none | no | A custom workflow. -| plan_requirements | []string | none | no | Requirements that must be satisfied before `atlantis plan` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. 
| | -| apply_requirements | []string | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | -| import_requirements | []string | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | -| allowed_overrides | []string | none | no | A list of restricted keys that `atlantis.yaml` files can override. The only supported keys are `apply_requirements`, `workflow`, `delete_source_branch_on_merge`,`repo_locking`, and `custom_policy_check` | -| allowed_workflows | []string | none | no | A list of workflows that `atlantis.yaml` files can select from. | -| allow_custom_workflows | bool | false | no | Whether or not to allow [Custom Workflows](custom-workflows.html). | -| delete_source_branch_on_merge | bool | false | no | Whether or not to delete the source branch on merge. | -| repo_locking | bool | false | no | Whether or not to get a lock. | -| policy_check | bool | false | no | Whether or not to run policy checks on this repository. | -| custom_policy_check | bool | false | no | Whether or not to enable custom policy check tools outside of Conftest on this repository. | -| autodiscover | AutoDiscover | none | no | Auto discover settings for this repo +| Key | Type | Default | Required | Description | +|-------------------------------|-------------------------|-----------------|----------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| id | string | none | yes | Value can be a regular expression when specified as /<regex>/ or an exact string match. Repo IDs are of the form `{vcs hostname}/{org}/{name}`, ex. `github.com/owner/repo`. Hostname is specified without scheme or port. For Bitbucket Server, {org} is the **name** of the project, not the key. | +| branch | string | none | no | An regex matching pull requests by base branch (the branch the pull request is getting merged into). By default, all branches are matched | +| repo_config_file | string | none | no | Repo config file path in this repo. By default, use `atlantis.yaml` which is located on repository root. When multiple atlantis servers work with the same repo, please set different file names. | +| workflow | string | none | no | A custom workflow. | +| plan_requirements | []string | none | no | Requirements that must be satisfied before `atlantis plan` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. | +| apply_requirements | []string | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. | +| import_requirements | []string | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. 
See [Command Requirements](command-requirements.md) for more details. | +| allowed_overrides | []string | none | no | A list of restricted keys that `atlantis.yaml` files can override. The only supported keys are `apply_requirements`, `workflow`, `delete_source_branch_on_merge`,`repo_locking`, `repo_locks`, and `custom_policy_check` | +| allowed_workflows | []string | none | no | A list of workflows that `atlantis.yaml` files can select from. | +| allow_custom_workflows | bool | false | no | Whether or not to allow [Custom Workflows](custom-workflows.md). | +| delete_source_branch_on_merge | bool | false | no | Whether or not to delete the source branch on merge. | +| repo_locking | bool | false | no | (deprecated) Whether or not to get a lock. | +| repo_locks | [RepoLocks](#repolocks) | `mode: on_plan` | no | Whether or not repository locks are enabled for this project on plan or apply. See [RepoLocks](#repolocks) for more details. | +| policy_check | bool | false | no | Whether or not to run policy checks on this repository. | +| custom_policy_check | bool | false | no | Whether or not to enable custom policy check tools outside of Conftest on this repository. | +| autodiscover | AutoDiscover | none | no | Auto discover settings for this repo | +| silence_pr_comments | []string | none | no | Silence PR comments from defined stages while preserving PR status checks. Useful in large environments with many Atlantis instances and/or projects, when the comments are too big and too many, therefore it is preferable to rely solely on PR status checks. Supported values are: `plan`, `apply`. | :::tip Notes + * If multiple repos match, the last match will apply. * If a key isn't defined, it won't override a key that matched from above. For example, given a repo ID `github.com/owner/repo` and a config: + ```yaml repos: - id: /.*/ @@ -518,12 +561,14 @@ If you set a workflow with the key `default`, it will override this. ``` The final config will look like: + ```yaml apply_requirements: [] workflow: default allowed_overrides: [] allow_custom_workflows: true ``` + Where * `apply_requirements` is set from the `id: github.com/owner/repo` config because it overrides the previous matching config from `id: /.*/`. @@ -535,6 +580,16 @@ If you set a workflow with the key `default`, it will override this. by the `id: github.com/owner/repo` config because it didn't define that key. ::: +### RepoLocks + +```yaml +mode: on_apply +``` + +| Key | Type | Default | Required | Description | +|------|--------|-----------|----------|---------------------------------------------------------------------------------------------------------------------------------------| +| mode | `Mode` | `on_plan` | no | Whether or not repository locks are enabled for this project on plan or apply. Valid values are `disabled`, `on_plan` and `on_apply`. | + ### Policies | Key | Type | Default | Required | Description | @@ -545,6 +600,7 @@ If you set a workflow with the key `default`, it will override this. | policy_sets | []PolicySet | none | yes | set of policies to run on a plan output | ### Owners + | Key | Type | Default | Required | Description | |-------------|-------------------|---------|------------|---------------------------------------------------------| | users | []string | none | no | list of github users that can approve failing policies | @@ -552,12 +608,12 @@ If you set a workflow with the key `default`, it will override this. 
### PolicySet -| Key | Type | Default | Required | Description | -| ------ | ------ | ------- | -------- | -------------------------------------- | -| name | string | none | yes | unique name for the policy set | -| path | string | none | yes | path to the rego policies directory | -| source | string | none | yes | only `local` is supported at this time | - +| Key | Type | Default | Required | Description | +| ------ | ------ | ------- | -------- | --------------------------------------------------------------------------------------------------------------| +| name | string | none | yes | unique name for the policy set | +| path | string | none | yes | path to the rego policies directory | +| source | string | none | yes | only `local` is supported at this time | +| prevent_self_approve | bool | false | no | Whether or not the author of PR can approve policies. Defaults to `false` (the author must also be in owners) | ### Metrics @@ -578,3 +634,10 @@ If you set a workflow with the key `default`, it will override this. | Key | Type | Default | Required | Description | | -------- | ------ | ------- | -------- | -------------------------------------- | | endpoint | string | none | yes | path to metrics endpoint | + +### TeamAuthz + +| Key | Type | Default | Required | Description | +|---------|----------|---------|----------|---------------------------------------------| +| command | string | none | yes | full path to external authorization command | +| args | []string | none | no | optional arguments to pass to `command` | diff --git a/runatlantis.io/docs/stats.md b/runatlantis.io/docs/stats.md index a2980c5634..9c6073ab64 100644 --- a/runatlantis.io/docs/stats.md +++ b/runatlantis.io/docs/stats.md @@ -8,12 +8,11 @@ Currently Statsd and Prometheus is supported. See configuration below for detail ## Configuration -Metrics are configured through the [Server Side Config](server-side-repo-config.html#metrics). +Metrics are configured through the [Server Side Config](server-side-repo-config.md#metrics). ## Available Metrics -Assuming metrics are exposed from the endpoint `/metrics` from the [metrics](server-side-repo-config.html#metrics) server side config e.g. - +Assuming metrics are exposed from the endpoint `/metrics` from the [metrics](server-side-repo-config.md#metrics) server side config e.g. ```yaml metrics: @@ -21,10 +20,8 @@ metrics: endpoint: "/metrics" ``` - To see all the metrics exposed from atlantis service, make a GET request to the `/metrics` endpoint. - ```bash curl localhost:4141/metrics # HELP atlantis_cmd_autoplan_builder_execution_error atlantis_cmd_autoplan_builder_execution_error counter @@ -47,20 +44,19 @@ atlantis_cmd_autoplan_builder_execution_time_count 10 ..... ``` - ::: tip NOTE The output shown above is trimmed, since with every new version release this metric set will need to be updated accordingly as there may be a case if some metrics are added/modified/deprecated, so the output shown above just gives a brief idea of how these metrics look like and rest can be explored. 
::: Important metrics to monitor are -| Metric Name | Metric Type | Purpose | -|------------------------------------------------|----------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------| -| `atlantis_cmd_autoplan_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when [autoplan](autoplanning.html#autoplanning) has thrown error. | -| `atlantis_cmd_comment_plan_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when on commenting `atlantis plan` has thrown error. | -| `atlantis_cmd_autoplan_execution_success` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when [autoplan](autoplanning.html#autoplanning) has run successfully. | -| `atlantis_cmd_comment_apply_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when on commenting `atlantis apply` has thrown error. | -| `atlantis_cmd_comment_apply_execution_success` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when on commenting `atlantis apply` has run successfully. | +| Metric Name | Metric Type | Purpose | +|------------------------------------------------|----------------------------------------------------------------------|-------------------------------------------------------------------------------------| +| `atlantis_cmd_autoplan_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times [autoplan](autoplanning.md#autoplanning) has errored. | +| `atlantis_cmd_comment_plan_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times commenting `atlantis plan` has errored. | +| `atlantis_cmd_autoplan_execution_success` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times [autoplan](autoplanning.md#autoplanning) has run successfully. | +| `atlantis_cmd_comment_apply_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times commenting `atlantis apply` has errored. | +| `atlantis_cmd_comment_apply_execution_success` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times commenting `atlantis apply` has run successfully. | ::: tip NOTE There are plenty of additional metrics exposed by atlantis that are not described above. diff --git a/runatlantis.io/docs/streaming-logs.md b/runatlantis.io/docs/streaming-logs.md index c066d47d47..df936c52f9 100644 --- a/runatlantis.io/docs/streaming-logs.md +++ b/runatlantis.io/docs/streaming-logs.md @@ -20,4 +20,3 @@ This will link to the atlantis UI which provides real-time logging in addition t ::: warning As of now the logs are stored in memory and cleared when a given pull request is closed, so this link shouldn't be persisted anywhere. ::: - diff --git a/runatlantis.io/docs/terraform-cloud.md b/runatlantis.io/docs/terraform-cloud.md index bab22a5db0..2e3393d7dd 100644 --- a/runatlantis.io/docs/terraform-cloud.md +++ b/runatlantis.io/docs/terraform-cloud.md @@ -6,15 +6,17 @@ and Private Terraform Enterprise was renamed Terraform Enterprise.
::: Atlantis integrates seamlessly with Terraform Cloud and Terraform Enterprise, whether you're using: + * [Free Remote State Management](https://app.terraform.io) * Terraform Cloud Paid Tiers * A Private Installation of Terraform Enterprise Read the docs below :point_down: depending on your use-case. -[[toc]] ## Using Atlantis With Free Remote State Storage + To use Atlantis with Free Remote State Storage, you need to: + 1. Migrate your state to Terraform Cloud. See [Migrating State from Local Terraform](https://developer.hashicorp.com/terraform/cloud-docs/migrate) 1. Update any projects that are referencing the state you migrated to use the new location 1. [Generate a Terraform Cloud/Enterprise Token](#generating-a-terraform-cloud-enterprise-token) @@ -24,6 +26,7 @@ That's it! Atlantis will run as normal and your state will be stored in Terrafor Cloud. ## Using Atlantis With Terraform Cloud Remote Operations or Terraform Enterprise + Atlantis integrates with the full version of Terraform Cloud and Terraform Enterprise via the [remote backend](https://developer.hashicorp.com/terraform/language/settings/backends/remote). @@ -31,7 +34,9 @@ Atlantis will run `terraform` commands as usual, however those commands will actually be executed *remotely* in Terraform Cloud or Terraform Enterprise. ### Why? + Using Atlantis with Terraform Cloud or Terraform Enterprise gives you access to features like: + * Real-time streaming output * Ability to cancel in-progress commands * Secret variables @@ -40,28 +45,34 @@ Using Atlantis with Terraform Cloud or Terraform Enterprise gives you access to **Without** having to change your pull request workflow. ### Getting Started + To use Atlantis with Terraform Cloud Remote Operations or Terraform Enterprise, you need to: + 1. Migrate your state to Terraform Cloud/Enterprise. See [Migrating State from Local Terraform](https://developer.hashicorp.com/terraform/cloud-docs/migrate) 1. Update any projects that are referencing the state you migrated to use the new location 1. [Generate a Terraform Cloud/Enterprise Token](#generating-a-terraform-cloud-enterprise-token) 1. [Pass the token to Atlantis](#passing-the-token-to-atlantis) ## Generating a Terraform Cloud/Enterprise Token + Atlantis needs a Terraform Cloud/Enterprise Token that it will use to access the API. Using a **Team Token is recommended**, however you can also use a User Token. ### Team Token + To generate a team token, click on **Settings** in the top bar, then **Teams** in the sidebar. Choose an existing team or create a new one. Enable the **Manage Workspaces** permission, then scroll down to **Team API Token**. ### User Token + To generate a user token, click on your avatar, then **User Settings**, then **Tokens** in the sidebar. Ensure the **Manage Workspaces** permission is enabled for this user's team. ## Passing The Token To Atlantis + The token can be passed to Atlantis via the `ATLANTIS_TFE_TOKEN` environment variable. You can also use the `--tfe-token` flag, however your token would then be easily @@ -88,12 +99,14 @@ Under the hood, Atlantis is generating a `~/.terraformrc` file. If you already had a `~/.terraformrc` file where Atlantis is running, then you'll need to manually add the credentials block to that file: -``` + +```hcl ... credentials "app.terraform.io" { token = "xxxx" } ``` + instead of using the `ATLANTIS_TFE_TOKEN` environment variable, since Atlantis won't overwrite your `.terraformrc` file. 
::: diff --git a/runatlantis.io/docs/terraform-versions.md b/runatlantis.io/docs/terraform-versions.md index 79fdee0db3..77b749a765 100644 --- a/runatlantis.io/docs/terraform-versions.md +++ b/runatlantis.io/docs/terraform-versions.md @@ -4,47 +4,60 @@ You can customize which version of Terraform Atlantis defaults to by setting the `--default-tf-version` flag (ex. `--default-tf-version=v1.3.7`). ## Via `atlantis.yaml` + If you wish to use a different version than the default for a specific repo or project, you need to create an `atlantis.yaml` file and set the `terraform_version` key: + ```yaml version: 3 projects: - dir: . terraform_version: v1.1.5 ``` -See [atlantis.yaml Use Cases](repo-level-atlantis-yaml.html#terraform-versions) for more details. + +See [atlantis.yaml Use Cases](repo-level-atlantis-yaml.md#terraform-versions) for more details. ## Via terraform config + Alternatively, one can use the terraform configuration block's `required_version` key to specify an exact version (`x.y.z` or `= x.y.z`), or as of [atlantis v0.21.0](https://github.com/runatlantis/atlantis/releases/tag/v0.21.0), a comparison or pessimistic [version constraint](https://developer.hashicorp.com/terraform/language/expressions/version-constraints#version-constraint-syntax): -#### Exactly version 1.2.9 + +### Exactly version 1.2.9 + ```tf terraform { required_version = "= 1.2.9" } ``` -#### Any patch/tiny version of minor version 1.2 (1.2.z) + +### Any patch/tiny version of minor version 1.2 (1.2.z) + ```tf terraform { required_version = "~> 1.2.0" } ``` -#### Any minor version of major version 1 (1.y.z) + +### Any minor version of major version 1 (1.y.z) + ```tf terraform { required_version = "~> 1.2" } ``` -#### Any version that is at least 1.2.0 + +### Any version that is at least 1.2.0 + ```tf terraform { required_version = ">= 1.2.0" } ``` -See [Terraform `required_version`](https://developer.hashicorp.com/terraform/language/settings#specifying-a-required-terraform-version) for reference. + +See [Terraform `required_version`](https://developer.hashicorp.com/terraform/language/terraform#terraform-required_version) for reference. ::: tip NOTE Atlantis will automatically download the latest version that fulfills the constraint specified. -A `terraform_version` specified in the `atlantis.yaml` file takes precedence over both the [`--default-tf-version`](server-configuration.html#default-tf-version) flag and the `required_version` in the terraform hcl. +A `terraform_version` specified in the `atlantis.yaml` file takes precedence over both the [`--default-tf-version`](server-configuration.md#default-tf-version) flag and the `required_version` in the terraform hcl. ::: ::: tip NOTE diff --git a/runatlantis.io/docs/troubleshooting-https.md b/runatlantis.io/docs/troubleshooting-https.md index 191a4b1242..f59058da1c 100644 --- a/runatlantis.io/docs/troubleshooting-https.md +++ b/runatlantis.io/docs/troubleshooting-https.md @@ -3,25 +3,24 @@ When using a self-signed certificate for Atlantis (with flags `--ssl-cert-file` and `--ssl-key-file`), there are a few considerations. -Atlantis uses the web server from the standard Go library, +Atlantis uses the web server from the standard Go library, the method name is [ListenAndServeTLS](https://pkg.go.dev/net/http#ListenAndServeTLS). `ListenAndServeTLS` acts identically to [ListenAndServe](https://pkg.go.dev/net/http#ListenAndServe), -except that it expects HTTPS connections. 
-Additionally, files containing a certificate and matching private key for the server must be provided. -If the certificate is signed by a certificate authority, -the file passed to `--ssl-cert-file` should be the concatenation of the server's certificate, any intermediates, and the CA's certificate. +except that it expects HTTPS connections. +Additionally, files containing a certificate and matching private key for the server must be provided. +If the certificate is signed by a certificate authority, +the file passed to `--ssl-cert-file` should be the concatenation of the server's certificate, any intermediates, and the CA's certificate. -If you have this error when specifying a TLS cert with a key: -``` +If you see this error when specifying a TLS cert and key: + +```plain [ERROR] server.go:413 server: Tls: private key does not match public key ``` Check that the locally signed certificate authority is prepended to the self-signed certificate. -A good example is shown at [Seth Vargo terraform implementation of atlantis-on-gke](https://github.com/sethvargo/atlantis-on-gke/blob/master/terraform/tls.tf#L64) +A good example is shown in [Seth Vargo's Terraform implementation of atlantis-on-gke](https://github.com/sethvargo/atlantis-on-gke/blob/master/terraform/tls.tf#L64-L84) For Go-specific TLS resources, have a look at the repository by [denji called golang-tls](https://github.com/denji/golang-tls). For a complete explanation on PKI, read this [article](https://smallstep.com/blog/everything-pki.html). - - diff --git a/runatlantis.io/docs/upgrading-atlantis-yaml.md b/runatlantis.io/docs/upgrading-atlantis-yaml.md index 1b8fe7aaa0..37e20900e8 100644 --- a/runatlantis.io/docs/upgrading-atlantis-yaml.md +++ b/runatlantis.io/docs/upgrading-atlantis-yaml.md @@ -1,15 +1,17 @@ # Upgrading atlantis.yaml ## Upgrading From v2 To v3 + Atlantis version `v0.7.0` introduced a new version 3 of `atlantis.yaml`. -**If you're not using [custom `run` steps](custom-workflows.html#custom-run-command), +**If you're not using [custom `run` steps](custom-workflows.md#custom-run-command), then you can upgrade from `version: 2` to `version: 3` without any changes.** **NOTE:** Version 2 **is not being deprecated** and there is no need to upgrade your version if you don't wish to do so. The only change from v2 to v3 is that we're parsing custom `run` steps differently. + ```yaml # atlantis.yaml workflows: @@ -18,33 +20,38 @@ workflows: steps: - run: my custom command ``` + <center><i>An example workflow using a custom run step</i></center> Previously, we used a library that would parse the custom step prior to running it. Now, we just run the step directly. This will only affect your steps if they were using shell escaping of some sort. For example, if your step was previously: + ```yaml # version: 2 - run: "printf \'print me\'" ``` You can now write this in version 3 as: + ```yaml # version: 3 - run: "printf 'print me'" ``` - ## Upgrading From V1 To V3 + If you are upgrading from an **old** Atlantis version `<=v0.3.10` (from before July 4, 2018) you'll need to follow these steps. ### Single atlantis.yaml + If you had an `atlantis.yaml` file per directory, you'll need to consolidate them into a single `atlantis.yaml` file at the root of the repo. For example, if you had a directory structure: -``` + +```plain . ├── project1 │ └── atlantis.yaml @@ -53,7 +60,8 @@ For example, if you had a directory structure: ``` Then your new structure would look like: -``` + +```plain .
├── atlantis.yaml ├── project1 @@ -61,6 +69,7 @@ Then your new structure would look like: ``` And your `atlantis.yaml` would look something like: + ```yaml version: 2 projects: @@ -80,13 +89,16 @@ workflows: We will talk more about `workflows` below. ### Terraform Version + The `terraform_version` key moved from being a top-level key to being per `project`, so if your `atlantis.yaml` was previously in directory `mydir` and looked like: + ```yaml terraform_version: 0.11.0 ``` Then your new config would be: + ```yaml version: 2 projects: @@ -95,9 +107,11 @@ ``` ### Workflows + Workflows are the new way to set all `pre_*`, `post_*` and `extra_arguments`. Each `project` can have a custom workflow via the `workflow` key. + ```yaml version: 2 projects: @@ -106,6 +120,7 @@ ``` Workflows are defined as a top-level key: + ```yaml version: 2 projects: @@ -118,6 +133,7 @@ workflows: To start with, determine whether you're customizing commands that happen during `plan` or `apply`. You then set that key under the workflow's name: + ```yaml ... workflows: @@ -133,6 +149,7 @@ workflows: If you're not customizing a specific stage then you can omit that key. For example if you're only customizing the commands that happen during `plan` then your config will look like: + ```yaml ... workflows: @@ -143,7 +160,9 @@ ``` #### Extra Arguments + `extra_arguments` is now specified as follows. Given a previous config: + ```yaml extra_arguments: - command_name: init @@ -158,6 +177,7 @@ extra_arguments: ``` Your config would now look like: + ```yaml ... workflows: @@ -174,8 +194,8 @@ workflows: extra_args: ["-lock=false"] ``` - #### Pre/Post Commands + Instead of using `pre_*` or `post_*`, you can now insert your custom commands before/after the built-in commands. Given a previous config: @@ -202,6 +222,7 @@ post_apply: ``` Your config would now look like: + ```yaml ... workflows: diff --git a/runatlantis.io/docs/using-atlantis.md b/runatlantis.io/docs/using-atlantis.md index 15a0b5a681..16f5ade9a3 100644 --- a/runatlantis.io/docs/using-atlantis.md +++ b/runatlantis.io/docs/using-atlantis.md @@ -5,8 +5,9 @@ Atlantis triggers commands via pull request comments. ::: tip You can use the following executable names. + * `atlantis help` - * `atlantis` is executable name. You can configure by [Executable Name](/docs/server-configuration.html#executable-name). + * `atlantis` is the default executable name. It can be configured via [Executable Name](server-configuration.md#executable-name). * `run help` * `run` is a global executable name. * `@GithubUser help` @@ -14,35 +15,46 @@ You can use following executable names. ::: Currently, Atlantis supports the following commands. -[[toc]] --- + ## atlantis help + ```bash atlantis help ``` + ### Explanation + View help --- + ## atlantis version + ```bash atlantis version ``` ### Explanation + Print the output of 'terraform version'. --- + ## atlantis plan + ```bash atlantis plan [options] -- [terraform plan flags] ``` + ### Explanation + Runs `terraform plan` on the pull request's branch. You may wish to re-run plan after Atlantis has already done so if you've changed some resources manually. ### Examples + ```bash # Runs plan for any projects that Atlantis thinks were modified. # If an `atlantis.yaml` file is specified, runs plan on the projects that @@ -60,9 +72,10 @@ atlantis plan -w staging ``` ### Options + * `-d directory` Which directory to run plan in relative to root of repo. Use `.` for root. - * Ex.
`atlantis plan -d child/dir` -* `-p project` Which project to run plan for. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.html). Cannot be used at same time as `-d` or `-w` because the project defines this already. + * Ex. `atlantis plan -d child/dir` +* `-p project` Which project to run plan for. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.md). Cannot be used at same time as `-d` or `-w` because the project defines this already. * `-w workspace` Switch to this [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces) before planning. Defaults to `default`. Ignore this if Terraform workspaces are unused. * `--verbose` Append Atlantis log to comment. @@ -74,30 +87,38 @@ A `atlantis plan` (without flags), like autoplans, discards all plans previously If `terraform plan` requires additional arguments, like `-target=resource` or `-var 'foo=bar'` or `-var-file myfile.tfvars` you can append them to the end of the comment after `--`, ex. -``` + +```shell atlantis plan -d dir -- -var foo='bar' ``` -If you always need to append a certain flag, see [Custom Workflow Use Cases](custom-workflows.html#adding-extra-arguments-to-terraform-commands). + +If you always need to append a certain flag, see [Custom Workflow Use Cases](custom-workflows.md#adding-extra-arguments-to-terraform-commands). ### Using the -destroy Flag #### Example + To perform a destructive plan that will destroy resources you can use the `-destroy` flag like this: ```bash atlantis plan -- -destroy atlantis plan -d dir -- -destroy ``` -::: warning NOTE + +::: warning NOTE The `-destroy` flag generates a destroy plan. If this plan is applied, it can result in data loss or service disruptions. Ensure that you have thoroughly reviewed your Terraform configuration and intend to remove the specified resources before using this flag. ::: --- + ## atlantis apply + ```bash atlantis apply [options] -- [terraform apply flags] ``` + ### Explanation + Runs `terraform apply` for the plan that matches the directory/project/workspace. ::: tip @@ -106,8 +127,8 @@ This includes all projects that have been planned manually with `atlantis plan` For Atlantis commands to work, Atlantis needs to know the location where the plan file is. For that, you can use $PLANFILE which will contain the path of the plan file to be used in your custom steps. e.g. `terraform plan -out $PLANFILE` ::: - ### Examples + ```bash # Runs apply for all unapplied plans from this pull request. atlantis apply @@ -123,15 +144,18 @@ atlantis apply -w staging ``` ### Options + * `-d directory` Apply the plan for this directory, relative to root of repo. Use `.` for root. -* `-p project` Apply the plan for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.html). Cannot be used at same time as `-d` or `-w`. +* `-p project` Apply the plan for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.md). Cannot be used at same time as `-d` or `-w`. * `-w workspace` Apply the plan for this [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces). Ignore this if Terraform workspaces are unused. -* `--auto-merge-disabled` Disable [automerge](automerging.html) for this apply command.
+* `--auto-merge-disabled` Disable [automerge](automerging.md) for this apply command. +* `--auto-merge-method method` Specify which [merge method](automerging.md#how-to-set-the-merge-method-for-automerge) to use for the apply command if [automerge](automerging.md) is enabled. Implemented only for GitHub. * `--verbose` Append Atlantis log to comment. ### Additional Terraform flags Because Atlantis under the hood is running `terraform apply plan.tfplan`, any Terraform options that would change the `plan` are ignored, ex: + * `-target=resource` * `-var 'foo=bar'` * `-var-file=myfile.tfvars` @@ -140,17 +164,22 @@ They're ignored because they can't be specified for an already generated planfil If you would like to specify these flags, do it while running `atlantis plan`. --- + ## atlantis import + ```bash atlantis import [options] ADDRESS ID -- [terraform import flags] ``` + ### Explanation + Runs `terraform import` that matches the directory/project/workspace. This command discards the terraform plan result. After an import and before an apply, another `atlantis plan` must be run again. -To allow the `import` command requires [--allow-commands](/docs/server-configuration.html#allow-commands) configuration. +To use the `import` command, the [--allow-commands](server-configuration.md#allow-commands) configuration must include it. ### Examples + ```bash # Runs import atlantis import ADDRESS ID @@ -166,36 +195,45 @@ atlantis import -w staging ADDRESS ID ``` ::: tip + * If importing `for_each` resources, a single-quoted address is required. * ex. `atlantis import 'aws_instance.example["foo"]' i-1234567890abcdef0` ::: ### Options + * `-d directory` Import a resource for this directory, relative to root of repo. Use `.` for root. -* `-p project` Import a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml`](repo-level-atlantis-yaml.html) repo configuration file. This cannot be used at the same time as `-d` or `-w`. +* `-p project` Import a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml`](repo-level-atlantis-yaml.md) repo configuration file. This cannot be used at the same time as `-d` or `-w`. * `-w workspace` Import a resource for a specific [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces). Ignore this if Terraform workspaces are unused. ### Additional Terraform flags If `terraform import` requires additional arguments, like `-var 'foo=bar'` or `-var-file myfile.tfvars` append them to the end of the comment after `--`, e.g. -``` + +```shell atlantis import -d dir 'aws_instance.example["foo"]' i-1234567890abcdef0 -- -var foo='bar' ``` -If a flag is needed to be always appended, see [Custom Workflow Use Cases](custom-workflows.html#adding-extra-arguments-to-terraform-commands). + +If a flag always needs to be appended, see [Custom Workflow Use Cases](custom-workflows.md#adding-extra-arguments-to-terraform-commands). --- + ## atlantis state rm + ```bash atlantis state [options] rm ADDRESS... -- [terraform state rm flags] ``` + ### Explanation + Runs `terraform state rm` that matches the directory/project/workspace. This command discards the terraform plan result. After running `state rm` and before an apply, another `atlantis plan` must be run. -To allow the `state` command requires [--allow-commands](/docs/server-configuration.html#allow-commands) configuration. +To use the `state` command, the [--allow-commands](server-configuration.md#allow-commands) configuration must include it.
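For reference, a hedged sketch of enabling these extra commands server-side — assuming `--allow-commands` accepts a comma-separated list, as described in the [server configuration](server-configuration.md#allow-commands) docs (the allowlist value is illustrative):

```bash
# Enable import and state on top of the default command set.
atlantis server \
  --allow-commands=version,plan,apply,unlock,approve_policies,import,state \
  --repo-allowlist="$REPO_ALLOWLIST"
```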
### Examples + ```bash # Runs state rm atlantis state rm ADDRESS1 ADDRESS2 @@ -211,44 +249,55 @@ atlantis state -w staging rm ADDRESS ``` ::: tip + * If running `state rm` on `for_each` resources, a single-quoted address is required. * ex. `atlantis state rm 'aws_instance.example["foo"]'` ::: ### Options + * `-d directory` Run `state rm` on a resource in this directory, relative to root of repo. Use `.` for root. -* `-p project` Run state rm a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml`](repo-level-atlantis-yaml.html) repo configuration file. This cannot be used at the same time as `-d` or `-w`. +* `-p project` Run `state rm` on a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml`](repo-level-atlantis-yaml.md) repo configuration file. This cannot be used at the same time as `-d` or `-w`. * `-w workspace` Run `state rm` on a resource in a specific [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces). Ignore this if Terraform workspaces are unused. ### Additional Terraform flags If `terraform state rm` requires additional arguments, like `-lock=false`, append them to the end of the comment after `--`, e.g. -``` + +```shell atlantis state -d dir rm 'aws_instance.example["foo"]' -- -lock=false ``` -If a flag is needed to be always appended, see [Custom Workflow Use Cases](custom-workflows.html#adding-extra-arguments-to-terraform-commands). + +If a flag always needs to be appended, see [Custom Workflow Use Cases](custom-workflows.md#adding-extra-arguments-to-terraform-commands). --- + ## atlantis unlock + ```bash atlantis unlock ``` ### Explanation + Removes all atlantis locks and discards all plans for this PR. To unlock a specific plan you can use the Atlantis UI. --- + ## atlantis approve_policies + ```bash atlantis approve_policies ``` ### Explanation + Approves all current policy checking failures for the PR. -See also [policy checking](/docs/policy-checking.html). +See also [policy checking](policy-checking.md). ### Options + * `--verbose` Append Atlantis log to comment. diff --git a/runatlantis.io/docs/using-slack-hooks.md b/runatlantis.io/docs/using-slack-hooks.md index c75c243fca..572b0857f8 100644 --- a/runatlantis.io/docs/using-slack-hooks.md +++ b/runatlantis.io/docs/using-slack-hooks.md @@ -13,7 +13,7 @@ For this you'll need to: ## Configuring Slack for Atlantis -* Go to [https://api.slack.com/apps](https://api.slack.com/apps) +* Go to [Slack: Apps](https://api.slack.com/apps) * Click the `Create New App` button * Select `From scratch` in the dialog that opens * Give it a name, e.g. `atlantis-bot`. @@ -43,13 +43,12 @@ webhooks: workspace-regex: .* branch-regex: .* kind: slack - channel: my-channel + channel: my-channel-id ``` If you are deploying Atlantis as a Helm chart, this can be implemented via the `config` parameter available for [chart customizations](https://github.com/runatlantis/helm-charts#customization): -``` - +```yaml ## Use Server Side Config, ## ref: https://www.runatlantis.io/docs/server-configuration.html config: | @@ -59,9 +58,7 @@ config: | workspace-regex: .* branch-regex: .* kind: slack - channel: my-channel + channel: my-channel-id ``` - - -The `apply` event information will be sent to the `my-channel` Slack channel. +The `apply` event information will be sent to the `my-channel-id` Slack channel.
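Note that the webhook configuration above only takes effect if Atlantis is started with a Slack token. A minimal sketch, assuming the `--slack-token` flag from the server configuration docs (the variable names are illustrative):

```bash
# SLACK_TOKEN is the token of the Slack app created above.
atlantis server \
  --slack-token="$SLACK_TOKEN" \
  --repo-allowlist="$REPO_ALLOWLIST"
```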
diff --git a/runatlantis.io/docs/webhook-secrets.md b/runatlantis.io/docs/webhook-secrets.md index 8b66ee8276..4e2ab1a059 100644 --- a/runatlantis.io/docs/webhook-secrets.md +++ b/runatlantis.io/docs/webhook-secrets.md @@ -17,27 +17,30 @@ Azure DevOps uses Basic authentication for webhooks rather than webhook secrets. ::: ::: tip NOTE -An app-wide token is generated during [GitHub App setup](access-credentials.html#github-app). You can recover it by navigating to the [GitHub app settings page](https://github.com/settings/apps) and selecting "Edit" next to your Atlantis app's name. Token appears after clicking "Edit" under the Webhook header. +An app-wide token is generated during [GitHub App setup](access-credentials.md#github-app). You can recover it by navigating to the [GitHub app settings page](https://github.com/settings/apps) and selecting "Edit" next to your Atlantis app's name. The token appears after clicking "Edit" under the Webhook header. ::: ::: warning Bitbucket.org **does not** support webhook secrets. -To mitigate, use repo allowlists and IP allowlists. See [Security](security.html#bitbucket-cloud-bitbucket-org) for more information. +To mitigate, use repo allowlists and IP allowlists. See [Security](security.md#bitbucket-cloud-bitbucket-org) for more information. ::: ## Generating A Webhook Secret + You can use any random string generator to create your Webhook secret. It should be > 24 characters. For example: + * Generate via Ruby with `ruby -rsecurerandom -e 'puts SecureRandom.hex(32)'` -* Generate online with [https://www.browserling.com/tools/random-string](https://www.browserling.com/tools/random-string) +* Generate online with [browserling: Generate Random Strings and Numbers](https://www.browserling.com/tools/random-string) ::: tip NOTE You must use **the same** webhook secret for each repo. ::: ## Next Steps + * Record your secret -* You'll be using it later to [configure your webhooks](configuring-webhooks.html), however if you're -following the [Installation Guide](installation-guide.html) then your next step is to -[Deploy Atlantis](deployment.html) +* You'll be using it later to [configure your webhooks](configuring-webhooks.md); however, if you're +following the [Installation Guide](installation-guide.md) then your next step is to +[Deploy Atlantis](deployment.md) diff --git a/runatlantis.io/e2e/site-check.spec.js b/runatlantis.io/e2e/site-check.spec.js new file mode 100644 index 0000000000..2fbf3b5a3a --- /dev/null +++ b/runatlantis.io/e2e/site-check.spec.js @@ -0,0 +1,12 @@ +import { test } from '@playwright/test'; + +test('page should load without errors', async ({ page }) => { + // Listen for any errors that occur within the page + page.on('pageerror', error => { + console.error('Page error:', error.message); + throw new Error(`Page error: ${error.message}`); + }); + + // Navigate to the URL + await page.goto('http://localhost:8080/'); +}); diff --git a/runatlantis.io/guide/README.md b/runatlantis.io/guide.md similarity index 80% rename from runatlantis.io/guide/README.md rename to runatlantis.io/guide.md index 15472518b8..9d71a3acf1 100644 --- a/runatlantis.io/guide/README.md +++ b/runatlantis.io/guide.md @@ -1,15 +1,17 @@ # Introduction ## Getting Started -* If you'd like to just test out running Atlantis on an **example repo** check out the [Test Drive](test-drive.html). -* If you'd like to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.html).
-* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](/docs/installation-guide.html). + +* If you'd like to just test out running Atlantis on an **example repo** check out the [Test Drive](./guide/test-drive.md). +* If you'd like to test out running Atlantis on **your repos** then read [Testing Locally](./guide/testing-locally.md). +* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](./docs/installation-guide.md). ::: tip Looking for the full docs? -Go here: [www.runatlantis.io/docs](/docs/) +Go here: [www.runatlantis.io/docs](./docs.md) ::: ## Overview – What Is Atlantis? + Atlantis is an application for automating Terraform via pull requests. It is deployed as a standalone application into your infrastructure. No third-party has access to your credentials. @@ -21,14 +23,18 @@ When you want to apply, comment `atlantis apply` on the pull request and Atlanti will run `terraform apply` and comment back with the output. ## Watch + Check out the video below to see it in action: -[![Atlantis Walkthrough](./images/atlantis-walkthrough-icon.png)](https://www.youtube.com/watch?v=TmIPWda0IKg) +[![Atlantis Walkthrough](./guide/images/atlantis-walkthrough-icon.png)](https://www.youtube.com/watch?v=TmIPWda0IKg) ## Why would you run Atlantis? + ### Increased visibility + When everyone is executing Terraform on their own computers, it's hard to know the current state of your infrastructure: + * Is what's in `main` branch deployed? * Did someone forget to create a pull request for that latest change? * What was the output from that last `terraform apply`? @@ -37,6 +43,7 @@ With Atlantis, everything is visible on the pull request. You can view the histo of everything that was done to your infrastructure. ### Enable collaboration with everyone + You probably don't want to distribute Terraform credentials to everyone in your engineering organization, but now anyone can open up a Terraform pull request. @@ -44,10 +51,12 @@ You can require approval before the pull request is applied so nothing happens accidentally. ### Review Terraform pull requests better + You can't fully review a Terraform change without seeing the output of `terraform plan`. Now that output is added to the pull request automatically. ### Standardize your workflows + Atlantis locks a directory/workspace until the pull request is merged or the lock is manually deleted. This ensures that changes are applied in the order expected. @@ -55,6 +64,7 @@ The exact commands that Atlantis runs are configurable. You can run custom scrip to construct your ideal workflow. ## Next Steps -* If you'd like to just test out running Atlantis on an **example repo** check out the [Test Drive](test-drive.html). -* If you'd like to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.html). -* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](/docs/installation-guide.html). + +* If you'd like to just test out running Atlantis on an **example repo** check out the [Test Drive](./guide/test-drive.md). +* If you'd like to test out running Atlantis on **your repos** then read [Testing Locally](./guide/testing-locally.md). +* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](./docs/installation-guide.md). 
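As a taste of the workflow customization mentioned above, a repo-level `atlantis.yaml` can select a custom workflow — a minimal sketch, assuming custom workflows are allowed server-side (the workflow name and extra argument are hypothetical):

```yaml
version: 3
projects:
  - dir: .
    workflow: custom        # hypothetical workflow name
workflows:
  custom:
    plan:
      steps:
        - init
        - plan:
            extra_args: ["-lock=false"]
```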
diff --git a/runatlantis.io/guide/test-drive.md b/runatlantis.io/guide/test-drive.md index 22e8c77f21..8510f0a0e2 100644 --- a/runatlantis.io/guide/test-drive.md +++ b/runatlantis.io/guide/test-drive.md @@ -1,18 +1,22 @@ # Test Drive -To test drive Atlantis on an example repo, download the latest release: -[https://github.com/runatlantis/atlantis/releases](https://github.com/runatlantis/atlantis/releases) + +To test drive Atlantis on an example repo, download the latest release from +[GitHub](https://github.com/runatlantis/atlantis/releases) Once you've extracted the archive, run: + ```bash ./atlantis testdrive ``` This mode sets up Atlantis on a test repo so you can try it out. It will + - Fork an example Terraform project into your GitHub account - Install Terraform (if not already in your PATH) - Install [ngrok](https://ngrok.com/) so we can expose Atlantis to GitHub - Start Atlantis so you can execute commands on the pull request ## Next Steps -* If you're ready to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.html). -* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](/docs/installation-guide.html). + +- If you're ready to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.md). +- If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](../docs/installation-guide.md). diff --git a/runatlantis.io/guide/testing-locally.md b/runatlantis.io/guide/testing-locally.md index dcbc3d0f70..ae7131f6af 100644 --- a/runatlantis.io/guide/testing-locally.md +++ b/runatlantis.io/guide/testing-locally.md @@ -1,57 +1,67 @@ # Testing Locally + These instructions are for running Atlantis **locally on your own computer** so you can test it out against your own repositories before deciding whether to install it more permanently. ::: tip -If you want to set up a production-ready Atlantis installation, read [Deployment](../docs/deployment.html). +If you want to set up a production-ready Atlantis installation, read [Deployment](../docs/deployment.md). ::: Steps: -[[toc]] - ## Install Terraform + `terraform` needs to be in the `$PATH` for Atlantis. -Download from [https://developer.hashicorp.com/terraform/downloads](https://developer.hashicorp.com/terraform/downloads) -``` +Download from [Terraform](https://developer.hashicorp.com/terraform/downloads) + +```shell unzip path/to/terraform_*.zip -d /usr/local/bin ``` ## Download Atlantis -Get the latest release from [https://github.com/runatlantis/atlantis/releases](https://github.com/runatlantis/atlantis/releases) + +Get the latest release from [GitHub](https://github.com/runatlantis/atlantis/releases) and unpackage it. ## Download Ngrok + Atlantis needs to be accessible somewhere that github.com/gitlab.com/bitbucket.org or your GitHub/GitLab Enterprise installation can reach. One way to accomplish this is with ngrok, a tool that forwards your local port to a random public hostname. -Go to [https://ngrok.com/download](https://ngrok.com/download), download ngrok and `unzip` it. +[Download](https://ngrok.com/download) ngrok and `unzip` it. 
Start `ngrok` on port `4141` and take note of the hostname it gives you: + ```bash ./ngrok http 4141 ``` In a new tab (where you'll soon start Atlantis) create an environment variable with ngrok's hostname: + ```bash URL="https://{YOUR_HOSTNAME}.ngrok.io" ``` ## Create a Webhook Secret + GitHub and GitLab use webhook secrets so clients can verify that the webhooks came from them. -Create a random string of any length (you can use [https://www.random.org/strings/](https://www.random.org/strings/)) + +Create a random string of any length (you can use [random.org](https://www.random.org/strings/)) and set an environment variable: -``` + +```shell SECRET="{YOUR_RANDOM_STRING}" ``` ## Add Webhook + Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bitbucket repo: ### GitHub or GitHub Enterprise Webhook + <details> <summary>Expand</summary> <ul> @@ -77,6 +87,7 @@ Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bi </details> ### GitLab or GitLab Enterprise Webhook + <details> <summary>Expand</summary> <ul> @@ -98,6 +109,7 @@ Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bi </details> ### Bitbucket Cloud (bitbucket.org) Webhook + <details> <summary>Expand</summary> <ul> @@ -119,6 +131,7 @@ Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bi </details> ### Bitbucket Server (aka Stash) Webhook + <details> <summary>Expand</summary> <ul> @@ -135,51 +148,99 @@ Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bi </ul> </details> +### Gitea Webhook + +<details> + <summary>Expand</summary> + <ul> + <li>Click <strong>Settings > Webhooks</strong> in the top bar and then the sidebar</li> + <li>Click <strong>Add webhook > Gitea</strong> (Gitea webhooks are service-specific, but this works)</li> + <li>set <strong>Target URL</strong> to <code>http://$URL/events</code> (or <code>https://$URL/events</code> if you're using SSL) where <code>$URL</code> is where Atlantis is hosted. <strong>Be sure to add <code>/events</code></strong></li> + <li>double-check you added <code>/events</code> to the end of your URL.</li> + <li>set <strong>Secret</strong> to the Webhook Secret you generated previously + <ul> + <li><strong>NOTE</strong> If you're adding a webhook to multiple repositories, each repository will need to use the <strong>same</strong> secret.</li> + </ul> + </li> + <li>Select <strong>Custom Events...</strong></li> + <li>Check the boxes + <ul> + <li><strong>Repository events > Push</strong></li> + <li><strong>Issue events > Issue Comment</strong></li> + <li><strong>Pull Request events > Pull Request</strong></li> + <li><strong>Pull Request events > Pull Request Comment</strong></li> + <li><strong>Pull Request events > Pull Request Reviewed</strong></li> + <li><strong>Pull Request events > Pull Request Synchronized</strong></li> + </ul> + </li> + <li>Leave <strong>Active</strong> checked</li> + <li>Click <strong>Add Webhook</strong></li> + <li>See <a href="#next-steps">Next Steps</a></li> + </ul> +</details> ## Create an access token for Atlantis + We recommend using a dedicated CI user or creating a new user named **@atlantis** that performs all API actions; however, for testing, you can use your own user. Here we'll create the access token that Atlantis uses to comment on the pull request and set commit statuses.
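Whichever host you use, it can save debugging time to sanity-check the token once you've set the `TOKEN` variable in the steps below. A hedged example for GitHub (this is the standard REST endpoint; adapt for other hosts):

```bash
# Should print your user's JSON profile if the token is valid.
curl -H "Authorization: token $TOKEN" https://api.github.com/user
```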
### GitHub or GitHub Enterprise Access Token + - Create a [Personal Access Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#creating-a-fine-grained-personal-access-token) - create a token with **repo** scope - set the token as an environment variable -``` + +```shell TOKEN="{YOUR_TOKEN}" ``` ### GitLab or GitLab Enterprise Access Token -- follow [https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token) + +- follow [GitLab: Create a personal access token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token) - create a token with **api** scope - set the token as an environment variable -``` + +```shell TOKEN="{YOUR_TOKEN}" ``` ### Bitbucket Cloud (bitbucket.org) Access Token -- follow [https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/](https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/) + +- follow [BitBucket Cloud: Create an app password](https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/) - Label the password "atlantis" - Select **Pull requests**: **Read** and **Write** so that Atlantis can read your pull requests and write comments to them - set the token as an environment variable -``` + +```shell TOKEN="{YOUR_TOKEN}" ``` ### Bitbucket Server (aka Stash) Access Token + - Click on your avatar in the top right and select **Manage account** - Click **HTTP access tokens** in the sidebar - Click **Create token** - Name the token **atlantis** - Give the token **Read** Project permissions and **Write** Pull request permissions -- Choose an Expiry option **Do not expire** or **Expire automatically** +- Choose an Expiry option **Do not expire** or **Expire automatically** - Click **Create** and set the token as an environment variable -``` + +```shell TOKEN="{YOUR_TOKEN}" ``` +### Gitea Access Token + +- Go to "Profile and Settings" > "Settings" in Gitea (top-right) +- Go to "Applications" under "User Settings" in Gitea +- Create a token under the "Manage Access Tokens" with the following permissions: + - issue: Read and Write + - repository: Read and Write +- Record the access token ## Start Atlantis + You're almost ready to start Atlantis, just set two more variables: ```bash @@ -190,9 +251,11 @@ REPO_ALLOWLIST="$YOUR_GIT_HOST/$YOUR_USERNAME/$YOUR_REPO" # server without scheme or port and $YOUR_USERNAME will be the name of the **project** the repo # is under, **not the key** of the project. ``` + Now you can start Atlantis. The exact command differs depending on your Git host: ### GitHub Command + ```bash atlantis server \ --atlantis-url="$URL" \ @@ -203,6 +266,7 @@ atlantis server \ ``` ### GitHub Enterprise Command + ```bash HOSTNAME=YOUR_GITHUB_ENTERPRISE_HOSTNAME # ex. github.runatlantis.io atlantis server \ @@ -215,6 +279,7 @@ atlantis server \ ``` ### GitLab Command + ```bash atlantis server \ --atlantis-url="$URL" \ @@ -225,6 +290,7 @@ atlantis server \ ``` ### GitLab Enterprise Command + ```bash HOSTNAME=YOUR_GITLAB_ENTERPRISE_HOSTNAME # ex. gitlab.runatlantis.io atlantis server \ @@ -237,6 +303,7 @@ atlantis server \ ``` ### Bitbucket Cloud (bitbucket.org) Command + ```bash atlantis server \ --atlantis-url="$URL" \ @@ -246,6 +313,7 @@ atlantis server \ ``` ### Bitbucket Server (aka Stash) Command + ```bash BASE_URL=YOUR_BITBUCKET_SERVER_URL # ex. 
http://bitbucket.mycorp:7990 atlantis server \ @@ -273,46 +341,71 @@ atlantis server \ --ssl-key-file=file.key ``` +### Gitea + +```bash +atlantis server \ +--atlantis-url="$URL" \ +--gitea-user="$ATLANTIS_GITEA_USER" \ +--gitea-token="$ATLANTIS_GITEA_TOKEN" \ +--gitea-webhook-secret="$ATLANTIS_GITEA_WEBHOOK_SECRET" \ +--gitea-base-url="$ATLANTIS_GITEA_BASE_URL" \ +--gitea-page-size="$ATLANTIS_GITEA_PAGE_SIZE" \ +--repo-allowlist="$REPO_ALLOWLIST" \ +--ssl-cert-file=file.crt \ +--ssl-key-file=file.key +``` + ## Create a pull request + Create a pull request so you can test Atlantis. ::: tip You could add a null resource as a test: + ```hcl resource "null_resource" "example" {} ``` + Or just modify the whitespace in a file. ::: ### Autoplan + You should see Atlantis logging about receiving the webhook and you should see the output of `terraform plan` on your repo. Atlantis tries to figure out the directory to plan in based on the files modified. If you need to customize the directories that Atlantis runs in or the commands it runs if you're using workspaces -or `.tfvars` files, see [atlantis.yaml Reference](/docs/repo-level-atlantis-yaml.html#reference). +or `.tfvars` files, see [atlantis.yaml Reference](../docs/repo-level-atlantis-yaml.md#reference). ### Manual Plan + To manually `plan` in a specific directory or workspace, comment on the pull request using the `-d` or `-w` flags: -``` + +```shell atlantis plan -d mydir atlantis plan -w staging ``` To add additional arguments to the underlying `terraform plan` you can use: -``` + +```shell atlantis plan -- -target=resource -var 'foo=bar' ``` ### Apply + If you'd like to `apply`, type a comment: `atlantis apply`. You can use the `-d` or `-w` flags to point Atlantis at a specific plan. Otherwise it tries to apply the plan for the root directory. ## Real-time logs -The [real-time terraform output](/docs/streaming-logs.md) for your command can be found by clicking into the status check for a given project in a PR which + +The [real-time terraform output](../docs/streaming-logs.md) for your command can be found by clicking into the status check for a given project in a PR which links to the log-streaming UI. This is a terminal UI where you can view your commands executing in real-time. ## Next Steps -* If things are working as expected you can `Ctrl-C` the `atlantis server` command and the `ngrok` command. -* Hopefully Atlantis is working with your repo and you're ready to move on to a [production-ready deployment](../docs/deployment.html). -* If it's not working as expected, you may need to customize how Atlantis runs with an `atlantis.yaml` file. -See [atlantis.yaml use cases](/docs/repo-level-atlantis-yaml.html#use-cases). -* Check out our [full documentation](../docs/) for more details. + +- If things are working as expected you can `Ctrl-C` the `atlantis server` command and the `ngrok` command. +- Hopefully Atlantis is working with your repo and you're ready to move on to a [production-ready deployment](../docs/deployment.md). +- If it's not working as expected, you may need to customize how Atlantis runs with an `atlantis.yaml` file (see the sketch below). +See [atlantis.yaml use cases](../docs/repo-level-atlantis-yaml.md#use-cases). +- Check out our [full documentation](../docs.md) for more details.
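As referenced in the next steps above, a minimal repo-level `atlantis.yaml` sketch (the directory and workspace values are illustrative):

```yaml
version: 3
projects:
  - dir: project1      # hypothetical directory to plan in
    workspace: staging # hypothetical Terraform workspace
```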
diff --git a/runatlantis.io/index.md b/runatlantis.io/index.md new file mode 100644 index 0000000000..bee29ef85b --- /dev/null +++ b/runatlantis.io/index.md @@ -0,0 +1,43 @@ +--- +# https://vitepress.dev/reference/default-theme-home-page +layout: home + +pageClass: home-custom + +hero: + name: Atlantis + text: Terraform Pull Request Automation + tagline: Running Terraform Workflows with Ease + image: /hero.png + actions: + - theme: brand + text: Get Started + link: /guide + - theme: alt + text: What is Atlantis? + link: /blog/2017/introducing-atlantis + - theme: alt + text: Join us on Slack + link: https://communityinviter.com/apps/cloud-native/cncf + +features: + - title: Fewer Mistakes + details: "Catch errors in Terraform plan output before applying changes. Ensure changes are applied before merging." + icon: ✅ + - title: Empower Developers + details: "Developers can safely submit Terraform pull requests without credentials. Require approvals for applies." + icon: đŸ’ģ + - title: Instant Audit Logs + details: "Detailed logs for infrastructure changes, approvals, and user actions. Configure approvals for production changes." + icon: 📋 + - title: Proven at Scale + details: "Used by top companies to manage over 600 repos with 300 developers. In production since 2017." + icon: 🌍 + - title: Self-Hosted + details: "Your credentials remain secure. Deployable on VMs, Kubernetes, Fargate, etc. Supports GitHub, GitLab, Bitbucket, Azure DevOps." + icon: ⚙ī¸ + - title: Open Source + details: "Atlantis is an open source project with strong community support, powered by volunteer contributions." + icon: 🌐 + +--- diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-114x114.png b/runatlantis.io/public/apple-touch-icon-114x114.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-114x114.png rename to runatlantis.io/public/apple-touch-icon-114x114.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-120x120.png b/runatlantis.io/public/apple-touch-icon-120x120.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-120x120.png rename to runatlantis.io/public/apple-touch-icon-120x120.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-144x144.png b/runatlantis.io/public/apple-touch-icon-144x144.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-144x144.png rename to runatlantis.io/public/apple-touch-icon-144x144.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-152x152.png b/runatlantis.io/public/apple-touch-icon-152x152.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-152x152.png rename to runatlantis.io/public/apple-touch-icon-152x152.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-57x57.png b/runatlantis.io/public/apple-touch-icon-57x57.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-57x57.png rename to runatlantis.io/public/apple-touch-icon-57x57.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-60x60.png b/runatlantis.io/public/apple-touch-icon-60x60.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-60x60.png rename to runatlantis.io/public/apple-touch-icon-60x60.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-72x72.png b/runatlantis.io/public/apple-touch-icon-72x72.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-72x72.png rename to 
runatlantis.io/public/apple-touch-icon-72x72.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-76x76.png b/runatlantis.io/public/apple-touch-icon-76x76.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-76x76.png rename to runatlantis.io/public/apple-touch-icon-76x76.png diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic1.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic1.webp new file mode 100644 index 0000000000..50d4156d52 Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic1.webp differ diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic2.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic2.webp new file mode 100644 index 0000000000..fe15cbf47e Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic2.webp differ diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic3.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic3.webp new file mode 100644 index 0000000000..b448df066d Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic3.webp differ diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic4.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic4.webp new file mode 100644 index 0000000000..79418e0b3e Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic4.webp differ diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic5.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic5.webp new file mode 100644 index 0000000000..3c0086d447 Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic5.webp differ diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic6.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic6.webp new file mode 100644 index 0000000000..3be25b55b5 Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic6.webp differ diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic7.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic7.webp new file mode 100644 index 0000000000..bf38895ebc Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic7.webp differ diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic8.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic8.webp new file mode 100644 index 0000000000..9220492f87 Binary files /dev/null and 
b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic8.webp differ diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic9.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic9.webp new file mode 100644 index 0000000000..1aaad9cc7b Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic9.webp differ diff --git a/runatlantis.io/public/blog/april-2024-survey-results/deploy.webp b/runatlantis.io/public/blog/april-2024-survey-results/deploy.webp new file mode 100644 index 0000000000..38b721ccaa Binary files /dev/null and b/runatlantis.io/public/blog/april-2024-survey-results/deploy.webp differ diff --git a/runatlantis.io/public/blog/april-2024-survey-results/features.webp b/runatlantis.io/public/blog/april-2024-survey-results/features.webp new file mode 100644 index 0000000000..d116d5ab5f Binary files /dev/null and b/runatlantis.io/public/blog/april-2024-survey-results/features.webp differ diff --git a/runatlantis.io/public/blog/april-2024-survey-results/iac.webp b/runatlantis.io/public/blog/april-2024-survey-results/iac.webp new file mode 100644 index 0000000000..793dba5de6 Binary files /dev/null and b/runatlantis.io/public/blog/april-2024-survey-results/iac.webp differ diff --git a/runatlantis.io/public/blog/april-2024-survey-results/interact.webp b/runatlantis.io/public/blog/april-2024-survey-results/interact.webp new file mode 100644 index 0000000000..0eca135b15 Binary files /dev/null and b/runatlantis.io/public/blog/april-2024-survey-results/interact.webp differ diff --git a/runatlantis.io/public/blog/april-2024-survey-results/repos.webp b/runatlantis.io/public/blog/april-2024-survey-results/repos.webp new file mode 100644 index 0000000000..6e15d4e9f6 Binary files /dev/null and b/runatlantis.io/public/blog/april-2024-survey-results/repos.webp differ diff --git a/runatlantis.io/public/blog/april-2024-survey-results/vcs.webp b/runatlantis.io/public/blog/april-2024-survey-results/vcs.webp new file mode 100644 index 0000000000..628ab3869d Binary files /dev/null and b/runatlantis.io/public/blog/april-2024-survey-results/vcs.webp differ diff --git a/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic1.webp b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic1.webp new file mode 100644 index 0000000000..72dbca2425 Binary files /dev/null and b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic1.webp differ diff --git a/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic2.gif b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic2.gif new file mode 100644 index 0000000000..5846753a4f Binary files /dev/null and b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic2.gif differ diff --git a/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic3.webp b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic3.webp new file mode 100644 index 0000000000..8119b862e5 Binary files /dev/null and b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic3.webp differ diff --git a/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic4.webp b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic4.webp new file mode 100644 index 0000000000..96a6eb388d Binary files /dev/null and 
b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic4.webp differ diff --git a/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic5.webp b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic5.webp new file mode 100644 index 0000000000..936b7a02ab Binary files /dev/null and b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic5.webp differ diff --git a/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic6.webp b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic6.webp new file mode 100644 index 0000000000..aafbc40298 Binary files /dev/null and b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic6.webp differ diff --git a/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic7.webp b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic7.webp new file mode 100644 index 0000000000..5e55af24fe Binary files /dev/null and b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic7.webp differ diff --git a/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic8.webp b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic8.webp new file mode 100644 index 0000000000..ced941c865 Binary files /dev/null and b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic8.webp differ diff --git a/runatlantis.io/public/blog/hosting-our-static-site/code/cloudfront.tf b/runatlantis.io/public/blog/hosting-our-static-site/code/cloudfront.tf new file mode 100644 index 0000000000..3f0a3a4715 --- /dev/null +++ b/runatlantis.io/public/blog/hosting-our-static-site/code/cloudfront.tf @@ -0,0 +1,59 @@ +resource "aws_cloudfront_distribution" "www_distribution" { + // origin is where CloudFront gets its content from. + origin { + // We need to set up a "custom" origin because otherwise CloudFront won't + // redirect traffic from the root domain to the www domain, that is from + // runatlantis.io to www.runatlantis.io. + custom_origin_config { + // These are all the defaults. + http_port = "80" + https_port = "443" + origin_protocol_policy = "http-only" + origin_ssl_protocols = ["TLSv1", "TLSv1.1", "TLSv1.2"] + } + + // Here we're using our S3 bucket's URL! + domain_name = "${aws_s3_bucket.www.website_endpoint}" + // This can be any name to identify this origin. + origin_id = "${var.www_domain_name}" + } + + enabled = true + default_root_object = "index.html" + + // All values are defaults from the AWS console. + default_cache_behavior { + viewer_protocol_policy = "redirect-to-https" + compress = true + allowed_methods = ["GET", "HEAD"] + cached_methods = ["GET", "HEAD"] + // This needs to match the `origin_id` above. + target_origin_id = "${var.www_domain_name}" + min_ttl = 0 + default_ttl = 86400 + max_ttl = 31536000 + + forwarded_values { + query_string = false + cookies { + forward = "none" + } + } + } + + // Here we're ensuring we can hit this distribution using www.runatlantis.io + // rather than the domain name CloudFront gives us. + aliases = ["${var.www_domain_name}"] + + restrictions { + geo_restriction { + restriction_type = "none" + } + } + + // Here's where our certificate is loaded in! 
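+ // ("sni-only" serves the certificate via Server Name Indication, which every
+ // modern browser supports and which avoids paying for a dedicated-IP distribution.)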
+ viewer_certificate { + acm_certificate_arn = "${aws_acm_certificate.certificate.arn}" + ssl_support_method = "sni-only" + } +} diff --git a/runatlantis.io/public/blog/hosting-our-static-site/code/dns.tf b/runatlantis.io/public/blog/hosting-our-static-site/code/dns.tf new file mode 100644 index 0000000000..3d1c0a4a57 --- /dev/null +++ b/runatlantis.io/public/blog/hosting-our-static-site/code/dns.tf @@ -0,0 +1,18 @@ +// We want AWS to host our zone so its nameservers can point to our CloudFront +// distribution. +resource "aws_route53_zone" "zone" { + name = "${var.root_domain_name}" +} + +// This Route53 record will point at our CloudFront distribution. +resource "aws_route53_record" "www" { + zone_id = "${aws_route53_zone.zone.zone_id}" + name = "${var.www_domain_name}" + type = "A" + + alias = { + name = "${aws_cloudfront_distribution.www_distribution.domain_name}" + zone_id = "${aws_cloudfront_distribution.www_distribution.hosted_zone_id}" + evaluate_target_health = false + } +} diff --git a/runatlantis.io/public/blog/hosting-our-static-site/code/full.tf b/runatlantis.io/public/blog/hosting-our-static-site/code/full.tf new file mode 100644 index 0000000000..c35ac47529 --- /dev/null +++ b/runatlantis.io/public/blog/hosting-our-static-site/code/full.tf @@ -0,0 +1,84 @@ +resource "aws_s3_bucket" "root" { + bucket = "${var.root_domain_name}" + acl = "public-read" + policy = <<POLICY +{ + "Version":"2012-10-17", + "Statement":[ + { + "Sid":"AddPerm", + "Effect":"Allow", + "Principal": "*", + "Action":["s3:GetObject"], + "Resource":["arn:aws:s3:::${var.root_domain_name}/*"] + } + ] +} +POLICY + + website { + // Note this redirect. Here's where the magic happens. + redirect_all_requests_to = "https://${var.www_domain_name}" + } +} + +resource "aws_cloudfront_distribution" "root_distribution" { + origin { + custom_origin_config { + http_port = "80" + https_port = "443" + origin_protocol_policy = "http-only" + origin_ssl_protocols = ["TLSv1", "TLSv1.1", "TLSv1.2"] + } + domain_name = "${aws_s3_bucket.root.website_endpoint}" + origin_id = "${var.root_domain_name}" + } + + enabled = true + default_root_object = "index.html" + + default_cache_behavior { + viewer_protocol_policy = "redirect-to-https" + compress = true + allowed_methods = ["GET", "HEAD"] + cached_methods = ["GET", "HEAD"] + target_origin_id = "${var.root_domain_name}" + min_ttl = 0 + default_ttl = 86400 + max_ttl = 31536000 + + forwarded_values { + query_string = false + cookies { + forward = "none" + } + } + } + + aliases = ["${var.root_domain_name}"] + + restrictions { + geo_restriction { + restriction_type = "none" + } + } + + viewer_certificate { + acm_certificate_arn = "${aws_acm_certificate.certificate.arn}" + ssl_support_method = "sni-only" + } +} + +resource "aws_route53_record" "root" { + zone_id = "${aws_route53_zone.zone.zone_id}" + + // NOTE: name is blank here. + name = "" + type = "A" + + alias = { + name = "${aws_cloudfront_distribution.root_distribution.domain_name}" + zone_id = "${aws_cloudfront_distribution.root_distribution.hosted_zone_id}" + evaluate_target_health = false + } +} diff --git a/runatlantis.io/public/blog/hosting-our-static-site/code/main.tf b/runatlantis.io/public/blog/hosting-our-static-site/code/main.tf new file mode 100644 index 0000000000..717a18ed6e --- /dev/null +++ b/runatlantis.io/public/blog/hosting-our-static-site/code/main.tf @@ -0,0 +1,14 @@ +// This block tells Terraform that we're going to provision AWS resources. 
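+// (us-east-1 matters here: ACM certificates used with CloudFront have to be
+// issued in that region.)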
+provider "aws" { + region = "us-east-1" +} + +// Create a variable for our domain name because we'll be using it a lot. +variable "www_domain_name" { + default = "www.runatlantis.io" +} + +// We'll also need the root domain (also known as zone apex or naked domain). +variable "root_domain_name" { + default = "runatlantis.io" +} diff --git a/runatlantis.io/public/blog/hosting-our-static-site/code/s3-bucket.tf b/runatlantis.io/public/blog/hosting-our-static-site/code/s3-bucket.tf new file mode 100644 index 0000000000..f2421078cb --- /dev/null +++ b/runatlantis.io/public/blog/hosting-our-static-site/code/s3-bucket.tf @@ -0,0 +1,34 @@ +resource "aws_s3_bucket" "www" { + // Our bucket's name is going to be the same as our site's domain name. + bucket = "${var.www_domain_name}" + // Because we want our site to be available on the internet, we set this so + // anyone can read this bucket. + acl = "public-read" + // We also need to create a policy that allows anyone to view the content. + // This is basically duplicating what we did in the ACL but it's required by + // AWS. This post: http://amzn.to/2Fa04ul explains why. + policy = <<POLICY +{ + "Version":"2012-10-17", + "Statement":[ + { + "Sid":"AddPerm", + "Effect":"Allow", + "Principal": "*", + "Action":["s3:GetObject"], + "Resource":["arn:aws:s3:::${var.www_domain_name}/*"] + } + ] +} +POLICY + + // S3 understands what it means to host a website. + website { + // Here we tell S3 what to use when a request comes in to the root + // ex. https://www.runatlantis.io + index_document = "index.html" + // The page to serve up if a request results in an error or a non-existing + // page. + error_document = "404.html" + } +} diff --git a/runatlantis.io/public/blog/hosting-our-static-site/code/ssl-cert.tf b/runatlantis.io/public/blog/hosting-our-static-site/code/ssl-cert.tf new file mode 100644 index 0000000000..5f2afb379e --- /dev/null +++ b/runatlantis.io/public/blog/hosting-our-static-site/code/ssl-cert.tf @@ -0,0 +1,12 @@ +// Use the AWS Certificate Manager to create an SSL cert for our domain. +// This resource won't be created until you receive the email verifying you +// own the domain and you click on the confirmation link. +resource "aws_acm_certificate" "certificate" { + // We want a wildcard cert so we can host subdomains later. + domain_name = "*.${var.root_domain_name}" + validation_method = "EMAIL" + + // We also want the cert to be valid for the root domain even though we'll be + // redirecting to the www. domain immediately. 
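+ // (The wildcard above doesn't cover the bare apex, so the root domain has to
+ // be listed as a subject alternative name explicitly.)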
+ subject_alternative_names = ["${var.root_domain_name}"] +} diff --git a/runatlantis.io/public/blog/hosting-our-static-site/pic1.webp b/runatlantis.io/public/blog/hosting-our-static-site/pic1.webp new file mode 100644 index 0000000000..2c9eccd943 Binary files /dev/null and b/runatlantis.io/public/blog/hosting-our-static-site/pic1.webp differ diff --git a/runatlantis.io/public/blog/hosting-our-static-site/pic2.webp b/runatlantis.io/public/blog/hosting-our-static-site/pic2.webp new file mode 100644 index 0000000000..161d045f55 Binary files /dev/null and b/runatlantis.io/public/blog/hosting-our-static-site/pic2.webp differ diff --git a/runatlantis.io/public/blog/intro/intro1.gif b/runatlantis.io/public/blog/intro/intro1.gif new file mode 100644 index 0000000000..9e6bedb234 Binary files /dev/null and b/runatlantis.io/public/blog/intro/intro1.gif differ diff --git a/runatlantis.io/public/blog/intro/intro2.webp b/runatlantis.io/public/blog/intro/intro2.webp new file mode 100644 index 0000000000..73f3a8b687 Binary files /dev/null and b/runatlantis.io/public/blog/intro/intro2.webp differ diff --git a/runatlantis.io/public/blog/intro/intro3.webp b/runatlantis.io/public/blog/intro/intro3.webp new file mode 100644 index 0000000000..06e695872b Binary files /dev/null and b/runatlantis.io/public/blog/intro/intro3.webp differ diff --git a/runatlantis.io/public/blog/intro/intro4.webp b/runatlantis.io/public/blog/intro/intro4.webp new file mode 100644 index 0000000000..1e5447af91 Binary files /dev/null and b/runatlantis.io/public/blog/intro/intro4.webp differ diff --git a/runatlantis.io/public/blog/intro/intro5.webp b/runatlantis.io/public/blog/intro/intro5.webp new file mode 100644 index 0000000000..570893d50b Binary files /dev/null and b/runatlantis.io/public/blog/intro/intro5.webp differ diff --git a/runatlantis.io/public/blog/intro/intro6.webp b/runatlantis.io/public/blog/intro/intro6.webp new file mode 100644 index 0000000000..737f2bdb37 Binary files /dev/null and b/runatlantis.io/public/blog/intro/intro6.webp differ diff --git a/runatlantis.io/public/blog/intro/intro7.webp b/runatlantis.io/public/blog/intro/intro7.webp new file mode 100644 index 0000000000..0d17cfa0de Binary files /dev/null and b/runatlantis.io/public/blog/intro/intro7.webp differ diff --git a/runatlantis.io/public/blog/intro/intro8.webp b/runatlantis.io/public/blog/intro/intro8.webp new file mode 100644 index 0000000000..c78901d58d Binary files /dev/null and b/runatlantis.io/public/blog/intro/intro8.webp differ diff --git a/runatlantis.io/public/blog/intro/intro9.webp b/runatlantis.io/public/blog/intro/intro9.webp new file mode 100644 index 0000000000..3a9fe02848 Binary files /dev/null and b/runatlantis.io/public/blog/intro/intro9.webp differ diff --git a/runatlantis.io/public/blog/joining-hashicorp/pic1.webp b/runatlantis.io/public/blog/joining-hashicorp/pic1.webp new file mode 100644 index 0000000000..b16f758893 Binary files /dev/null and b/runatlantis.io/public/blog/joining-hashicorp/pic1.webp differ diff --git a/runatlantis.io/public/blog/putting-the-dev-into-devops/pic1.webp b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic1.webp new file mode 100644 index 0000000000..d446a53faf Binary files /dev/null and b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic1.webp differ diff --git a/runatlantis.io/public/blog/putting-the-dev-into-devops/pic10.webp b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic10.webp new file mode 100644 index 0000000000..5600b65d45 Binary files /dev/null and 
b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic10.webp differ diff --git a/runatlantis.io/public/blog/putting-the-dev-into-devops/pic2.webp b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic2.webp new file mode 100644 index 0000000000..dddf61327c Binary files /dev/null and b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic2.webp differ diff --git a/runatlantis.io/public/blog/putting-the-dev-into-devops/pic3.webp b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic3.webp new file mode 100644 index 0000000000..0968d91418 Binary files /dev/null and b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic3.webp differ diff --git a/runatlantis.io/public/blog/putting-the-dev-into-devops/pic4.webp b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic4.webp new file mode 100644 index 0000000000..b22d7baa32 Binary files /dev/null and b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic4.webp differ diff --git a/runatlantis.io/public/blog/putting-the-dev-into-devops/pic5.webp b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic5.webp new file mode 100644 index 0000000000..7ec132b7a9 Binary files /dev/null and b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic5.webp differ diff --git a/runatlantis.io/public/blog/putting-the-dev-into-devops/pic6.webp b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic6.webp new file mode 100644 index 0000000000..49e2ef5741 Binary files /dev/null and b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic6.webp differ diff --git a/runatlantis.io/public/blog/putting-the-dev-into-devops/pic7.webp b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic7.webp new file mode 100644 index 0000000000..2ac0d1fa26 Binary files /dev/null and b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic7.webp differ diff --git a/runatlantis.io/public/blog/putting-the-dev-into-devops/pic8.webp b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic8.webp new file mode 100644 index 0000000000..cda06abea7 Binary files /dev/null and b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic8.webp differ diff --git a/runatlantis.io/public/blog/putting-the-dev-into-devops/pic9.webp b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic9.webp new file mode 100644 index 0000000000..317f9105d7 Binary files /dev/null and b/runatlantis.io/public/blog/putting-the-dev-into-devops/pic9.webp differ diff --git a/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic1.webp b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic1.webp new file mode 100644 index 0000000000..56e33a387b Binary files /dev/null and b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic1.webp differ diff --git a/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic10.webp b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic10.webp new file mode 100644 index 0000000000..d3f8046f7c Binary files /dev/null and b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic10.webp differ diff --git a/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic11.webp b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic11.webp new file mode 100644 index 0000000000..d8f30438b2 Binary files /dev/null and b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic11.webp differ diff --git 
a/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic2.webp b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic2.webp new file mode 100644 index 0000000000..702c5e14ee Binary files /dev/null and b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic2.webp differ diff --git a/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic3.webp b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic3.webp new file mode 100644 index 0000000000..1931d85587 Binary files /dev/null and b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic3.webp differ diff --git a/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic4.webp b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic4.webp new file mode 100644 index 0000000000..6b2c2149f7 Binary files /dev/null and b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic4.webp differ diff --git a/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic5.webp b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic5.webp new file mode 100644 index 0000000000..36218014f4 Binary files /dev/null and b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic5.webp differ diff --git a/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic6.webp b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic6.webp new file mode 100644 index 0000000000..ea78d58e6f Binary files /dev/null and b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic6.webp differ diff --git a/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic7.webp b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic7.webp new file mode 100644 index 0000000000..44ba595bbd Binary files /dev/null and b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic7.webp differ diff --git a/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic8.webp b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic8.webp new file mode 100644 index 0000000000..3b80ecff50 Binary files /dev/null and b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic8.webp differ diff --git a/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic9.webp b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic9.webp new file mode 100644 index 0000000000..15529d2506 Binary files /dev/null and b/runatlantis.io/public/blog/terraform-and-the-dangers-of-applying-locally/pic9.webp differ diff --git a/runatlantis.io/.vuepress/public/favicon-128.png b/runatlantis.io/public/favicon-128.png similarity index 100% rename from runatlantis.io/.vuepress/public/favicon-128.png rename to runatlantis.io/public/favicon-128.png diff --git a/runatlantis.io/.vuepress/public/favicon-16x16.png b/runatlantis.io/public/favicon-16x16.png similarity index 100% rename from runatlantis.io/.vuepress/public/favicon-16x16.png rename to runatlantis.io/public/favicon-16x16.png diff --git a/runatlantis.io/.vuepress/public/favicon-196x196.png b/runatlantis.io/public/favicon-196x196.png similarity index 100% rename from runatlantis.io/.vuepress/public/favicon-196x196.png rename to runatlantis.io/public/favicon-196x196.png diff --git 
a/runatlantis.io/.vuepress/public/favicon-32x32.png b/runatlantis.io/public/favicon-32x32.png similarity index 100% rename from runatlantis.io/.vuepress/public/favicon-32x32.png rename to runatlantis.io/public/favicon-32x32.png diff --git a/runatlantis.io/.vuepress/public/favicon-96x96.png b/runatlantis.io/public/favicon-96x96.png similarity index 100% rename from runatlantis.io/.vuepress/public/favicon-96x96.png rename to runatlantis.io/public/favicon-96x96.png diff --git a/runatlantis.io/.vuepress/public/favicon.ico b/runatlantis.io/public/favicon.ico similarity index 100% rename from runatlantis.io/.vuepress/public/favicon.ico rename to runatlantis.io/public/favicon.ico diff --git a/runatlantis.io/.vuepress/public/hero.png b/runatlantis.io/public/hero.png similarity index 100% rename from runatlantis.io/.vuepress/public/hero.png rename to runatlantis.io/public/hero.png diff --git a/runatlantis.io/.vuepress/public/hightower-super-dope.svg b/runatlantis.io/public/hightower-super-dope.svg similarity index 100% rename from runatlantis.io/.vuepress/public/hightower-super-dope.svg rename to runatlantis.io/public/hightower-super-dope.svg diff --git a/runatlantis.io/.vuepress/public/mstile-144x144.png b/runatlantis.io/public/mstile-144x144.png similarity index 100% rename from runatlantis.io/.vuepress/public/mstile-144x144.png rename to runatlantis.io/public/mstile-144x144.png diff --git a/runatlantis.io/.vuepress/public/mstile-150x150.png b/runatlantis.io/public/mstile-150x150.png similarity index 100% rename from runatlantis.io/.vuepress/public/mstile-150x150.png rename to runatlantis.io/public/mstile-150x150.png diff --git a/runatlantis.io/.vuepress/public/mstile-310x150.png b/runatlantis.io/public/mstile-310x150.png similarity index 100% rename from runatlantis.io/.vuepress/public/mstile-310x150.png rename to runatlantis.io/public/mstile-310x150.png diff --git a/runatlantis.io/.vuepress/public/mstile-310x310.png b/runatlantis.io/public/mstile-310x310.png similarity index 100% rename from runatlantis.io/.vuepress/public/mstile-310x310.png rename to runatlantis.io/public/mstile-310x310.png diff --git a/runatlantis.io/.vuepress/public/mstile-70x70.png b/runatlantis.io/public/mstile-70x70.png similarity index 100% rename from runatlantis.io/.vuepress/public/mstile-70x70.png rename to runatlantis.io/public/mstile-70x70.png diff --git a/scripts/download-release.sh b/scripts/download-release.sh new file mode 100755 index 0000000000..8b661bf1a1 --- /dev/null +++ b/scripts/download-release.sh @@ -0,0 +1,33 @@ +#!/bin/sh +COMMAND_NAME=${1:-terraform} +TARGETPLATFORM=${2:-"linux/amd64"} +DEFAULT_VERSION=${3:-"1.8.0"} +AVAILABLE_VERSIONS=${4:-"1.8.0"} +case "${TARGETPLATFORM}" in + "linux/amd64") ARCH=amd64 ;; + "linux/arm64") ARCH=arm64 ;; + "linux/arm/v7") ARCH=arm ;; + *) echo "ERROR: 'TARGETPLATFORM' value unexpected: ${TARGETPLATFORM}"; exit 1 ;; +esac +for VERSION in ${AVAILABLE_VERSIONS}; do + case "${COMMAND_NAME}" in + "terraform") + DOWNLOAD_URL_FORMAT=$(printf 'https://releases.hashicorp.com/terraform/%s/%s_%s' "$VERSION" "$COMMAND_NAME" "$VERSION") + COMMAND_DIR=/usr/local/bin/terraform + ;; + "tofu") + DOWNLOAD_URL_FORMAT=$(printf 'https://github.com/opentofu/opentofu/releases/download/v%s/%s_%s' "$VERSION" "$COMMAND_NAME" "$VERSION") + COMMAND_DIR=/usr/local/bin/tofu + ;; + *) echo "ERROR: 'COMMAND_NAME' value unexpected: ${COMMAND_NAME}"; exit 1 ;; + esac + curl -LOs "${DOWNLOAD_URL_FORMAT}_linux_${ARCH}.zip" + curl -LOs "${DOWNLOAD_URL_FORMAT}_SHA256SUMS" + sed -n 
"/${COMMAND_NAME}_${VERSION}_linux_${ARCH}.zip/p" "${COMMAND_NAME}_${VERSION}_SHA256SUMS" | sha256sum -c + mkdir -p "${COMMAND_DIR}/${VERSION}" + unzip "${COMMAND_NAME}_${VERSION}_linux_${ARCH}.zip" -d "${COMMAND_DIR}/${VERSION}" + ln -s "${COMMAND_DIR}/${VERSION}/${COMMAND_NAME}" "${COMMAND_DIR}/${COMMAND_NAME}${VERSION}" + rm "${COMMAND_NAME}_${VERSION}_linux_${ARCH}.zip" + rm "${COMMAND_NAME}_${VERSION}_SHA256SUMS" +done +ln -s "${COMMAND_DIR}/${DEFAULT_VERSION}/${COMMAND_NAME}" "${COMMAND_DIR}/${COMMAND_NAME}" diff --git a/scripts/e2e-deps.sh b/scripts/e2e-deps.sh deleted file mode 100755 index 4ac9b9272b..0000000000 --- a/scripts/e2e-deps.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env bash - -# Exit immediately if a command returns a non-zero code -set -e - -echo "Preparing to run e2e tests" -if [ ! -f atlantis ]; then - echo "atlantis binary not found. exiting...." - exit 1 -fi -cp atlantis ${CIRCLE_WORKING_DIRECTORY}/e2e/ - -# cd into e2e folder -cd e2e/ -# Download terraform -curl -LOk https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip -unzip terraform_${TERRAFORM_VERSION}_linux_amd64.zip -chmod +x terraform -cp terraform /home/circleci/go/bin -# Download ngrok to create a tunnel to expose atlantis server -wget https://bin.equinox.io/c/bNyj1mQVY4c/ngrok-v2-stable-linux-amd64.zip -O ngrok-stable-linux-amd64.zip -unzip ngrok-stable-linux-amd64.zip -chmod +x ngrok -# Download jq -wget -O jq https://github.com/jqlang/jq/releases/download/jq-1.6/jq-linux64 -chmod +x jq -# Copy github config file - replace with circleci user later -cp .gitconfig ~/.gitconfig diff --git a/scripts/e2e.sh b/scripts/e2e.sh index 115c964454..8e531d9563 100755 --- a/scripts/e2e.sh +++ b/scripts/e2e.sh @@ -2,37 +2,79 @@ set -euo pipefail IFS=$'\n\t' +ATLANTIS_PID="" +NGROK_PID="" + +function cleanup() { + cleanupPid "$ATLANTIS_PID" + cleanupPid "$NGROK_PID" +} + +function cleanupPid() { + local pid="$1" + # Never set, no need to clean up + if [[ "$pid" == "" ]] + then + return + fi + # Somehow pid was not number, just being careful + if ! [[ "$pid" =~ ^[0-9]+$ ]] + then + return + fi + # Not currently running, no need to kill + if ! ps -p "$pid" &>/dev/null + then + return + fi + kill $pid +} -# download all the tooling needed for e2e tests -CIRCLE_WORKING_DIRECTORY="${CIRCLE_WORKING_DIRECTORY/#\~/$HOME}" # https://discuss.circleci.com/t/circle-working-directory-doesnt-expand/17007/5 -${CIRCLE_WORKING_DIRECTORY}/scripts/e2e-deps.sh -cd "${CIRCLE_WORKING_DIRECTORY}/e2e" # start atlantis server in the background and wait for it to start +# It's the responsibility of the caller of this script to set the github, gitlab, etc. +# permissions via environment variable ./atlantis server \ - --gh-user="$GITHUB_USERNAME" \ - --gh-token="$GITHUB_PASSWORD" \ --data-dir="/tmp" \ --log-level="debug" \ - --repo-allowlist="github.com/runatlantis/atlantis-tests" \ + --repo-allowlist="github.com/runatlantis/atlantis-tests,gitlab.com/run-atlantis/atlantis-tests" \ --repo-config-json='{"repos":[{"id":"/.*/", "allowed_overrides":["apply_requirements","workflow"], "allow_custom_workflows":true}]}' \ &> /tmp/atlantis-server.log & +ATLANTIS_PID=$! sleep 2 +if ! ps -p "$ATLANTIS_PID" &>/dev/null +then + echo "Atlantis failed to start" + cat /tmp/atlantis-server.log + exit 1 +fi +echo "Atlantis is running..." 
# start ngrok in the background and wait for it to start -./ngrok config add-authtoken $NGROK_AUTH_TOKEN -./ngrok http 4141 > /tmp/ngrok.log & +./ngrok config add-authtoken $NGROK_AUTH_TOKEN > /dev/null 2>&1 +./ngrok http 4141 > /tmp/ngrok.log 2>&1 & +NGROK_PID=$! sleep 2 +if ! ps -p "$NGROK_PID" &>/dev/null +then + cleanup + echo "Ngrok failed to start" + cat /tmp/ngrok.log + exit 1 +fi +echo "Ngrok is running..." # find out what URL ngrok has given us export ATLANTIS_URL=$(curl -s 'http://localhost:4040/api/tunnels' | jq -r '.tunnels[] | select(.proto=="https") | .public_url') # Now we can start the e2e tests +cd "${GITHUB_WORKSPACE:-$(git rev-parse --show-toplevel)}/e2e" echo "Running 'make build'" make build echo "Running e2e test: 'make run'" set +e +estatus=0 make run if [[ $? -eq 0 ]] then @@ -41,5 +83,7 @@ else echo "e2e tests failed" echo "atlantis logs:" cat /tmp/atlantis-server.log - exit 1 + estatus=1 fi +cleanup +exit $estatus diff --git a/scripts/fmt.sh b/scripts/fmt.sh new file mode 100755 index 0000000000..b8a5aef752 --- /dev/null +++ b/scripts/fmt.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash + +set -euo pipefail + +go install golang.org/x/tools/cmd/goimports@latest + +gobin="$(go env GOPATH)/bin" +declare -r gobin + +declare -a files +readarray -d '' files < <(find . -type f -name '*.go' ! -name 'mock_*' ! -path './vendor/*' ! -path '**/mocks/*' -print0) +declare -r files + +output="$("${gobin}"/goimports -l "${files[@]}")" +declare -r output + +if [[ -n "$output" ]]; then + echo "These files had their 'import' changed - please fix them locally and push a fix" + + echo "$output" + + exit 1 +fi diff --git a/scripts/pin_ci_terraform_providers.sh b/scripts/pin_ci_terraform_providers.sh new file mode 100755 index 0000000000..db4d9256de --- /dev/null +++ b/scripts/pin_ci_terraform_providers.sh @@ -0,0 +1,74 @@ +#!/bin/bash + +# Script to pin terraform providers in e2e tests + +RANDOM_PROVIDER_VERSION="3.6.1" +NULL_PROVIDER_VERSION="3.2.3" + +TEST_REPOS_DIR="server/controllers/events/testdata/test-repos" + +for file in $(find $TEST_REPOS_DIR -name '*.tf') +do + basename=$(basename $file) + if [[ "$basename" == "versions.tf" ]] + then + continue + fi + if [[ "$basename" != "main.tf" ]] + then + echo "Found unexpected file: $file" + exit 1 + fi + has_null_provider=false + has_random_provider=false + + version_file="$(dirname $file)/versions.tf" + for resource in $(cat $file | grep '^resource' | awk '{print $2}' | tr -d '"') + do + if [[ "$resource" == "null_resource" ]] + then + has_null_provider=true + elif [[ "$resource" == "random_id" ]] + then + has_random_provider=true + else + echo "Unknown resource $resource in $file" + exit 1 + fi + done + if ! $has_null_provider && ! 
$has_random_provider + then + echo "No providers needed for $file" + continue + fi + echo "Adding $version_file for $file" + rm -f $version_file + if $has_null_provider + then + echo 'provider "null" {}' >> $version_file + fi + if $has_random_provider + then + echo 'provider "random" {}' >> $version_file + fi + echo "terraform {" >> $version_file + echo " required_providers {" >> $version_file + + if $has_random_provider + then + echo " random = {" >> $version_file + echo ' source = "hashicorp/random"' >> $version_file + echo " version = \"= $RANDOM_PROVIDER_VERSION\"" >> $version_file + echo " }" >> $version_file + fi + if $has_null_provider + then + echo " null = {" >> $version_file + echo ' source = "hashicorp/null"' >> $version_file + echo " version = \"= $NULL_PROVIDER_VERSION\"" >> $version_file + echo " }" >> $version_file + fi + echo " }" >> $version_file + echo "}" >> $version_file + +done diff --git a/server/controllers/api_controller.go b/server/controllers/api_controller.go index 784120e982..f9b6aa809b 100644 --- a/server/controllers/api_controller.go +++ b/server/controllers/api_controller.go @@ -20,16 +20,19 @@ import ( const atlantisTokenHeader = "X-Atlantis-Token" type APIController struct { - APISecret []byte - Locker locking.Locker - Logger logging.SimpleLogging - Parser events.EventParsing - ProjectCommandBuilder events.ProjectCommandBuilder - ProjectPlanCommandRunner events.ProjectPlanCommandRunner - ProjectApplyCommandRunner events.ProjectApplyCommandRunner - RepoAllowlistChecker *events.RepoAllowlistChecker - Scope tally.Scope - VCSClient vcs.Client + APISecret []byte + Locker locking.Locker + Logger logging.SimpleLogging + Parser events.EventParsing + ProjectCommandBuilder events.ProjectCommandBuilder + ProjectPlanCommandRunner events.ProjectPlanCommandRunner + ProjectApplyCommandRunner events.ProjectApplyCommandRunner + FailOnPreWorkflowHookError bool + PreWorkflowHooksCommandRunner events.PreWorkflowHooksCommandRunner + PostWorkflowHooksCommandRunner events.PostWorkflowHooksCommandRunner + RepoAllowlistChecker *events.RepoAllowlistChecker + Scope tally.Scope + VCSClient vcs.Client } type APIRequest struct { @@ -44,7 +47,7 @@ type APIRequest struct { } } -func (a *APIRequest) getCommands(ctx *command.Context, cmdBuilder func(*command.Context, *events.CommentCommand) ([]command.ProjectContext, error)) ([]command.ProjectContext, error) { +func (a *APIRequest) getCommands(ctx *command.Context, cmdBuilder func(*command.Context, *events.CommentCommand) ([]command.ProjectContext, error)) ([]command.ProjectContext, []*events.CommentCommand, error) { cc := make([]*events.CommentCommand, 0) for _, project := range a.Projects { @@ -63,19 +66,19 @@ func (a *APIRequest) getCommands(ctx *command.Context, cmdBuilder func(*command. for _, commentCommand := range cc { projectCmds, err := cmdBuilder(ctx, commentCommand) if err != nil { - return nil, fmt.Errorf("failed to build command: %v", err) + return nil, nil, fmt.Errorf("failed to build command: %v", err) } cmds = append(cmds, projectCmds...) 
} - return cmds, nil + return cmds, cc, nil } func (a *APIController) apiReportError(w http.ResponseWriter, code int, err error) { response, _ := json.Marshal(map[string]string{ "error": err.Error(), }) - a.respond(w, logging.Warn, code, string(response)) + a.respond(w, logging.Warn, code, "%s", string(response)) } func (a *APIController) Plan(w http.ResponseWriter, r *http.Request) { @@ -103,7 +106,7 @@ func (a *APIController) Plan(w http.ResponseWriter, r *http.Request) { a.apiReportError(w, http.StatusInternalServerError, err) return } - a.respond(w, logging.Debug, code, string(response)) + a.respond(w, logging.Warn, code, "%s", string(response)) } func (a *APIController) Apply(w http.ResponseWriter, r *http.Request) { @@ -138,33 +141,59 @@ func (a *APIController) Apply(w http.ResponseWriter, r *http.Request) { a.apiReportError(w, http.StatusInternalServerError, err) return } - a.respond(w, logging.Debug, code, string(response)) + a.respond(w, logging.Warn, code, "%s", string(response)) } func (a *APIController) apiPlan(request *APIRequest, ctx *command.Context) (*command.Result, error) { - cmds, err := request.getCommands(ctx, a.ProjectCommandBuilder.BuildPlanCommands) + cmds, cc, err := request.getCommands(ctx, a.ProjectCommandBuilder.BuildPlanCommands) if err != nil { return nil, err } var projectResults []command.ProjectResult - for _, cmd := range cmds { + for i, cmd := range cmds { + err = a.PreWorkflowHooksCommandRunner.RunPreHooks(ctx, cc[i]) + if err != nil { + ctx.Log.Err("Error running pre-workflow hooks %s.", err) + if a.FailOnPreWorkflowHookError { + return nil, err + } + } + res := a.ProjectPlanCommandRunner.Plan(cmd) projectResults = append(projectResults, res) + + err = a.PostWorkflowHooksCommandRunner.RunPostHooks(ctx, cc[i]) + if err != nil { + ctx.Log.Err("Error running post-workflow hooks %s.", err) + } } return &command.Result{ProjectResults: projectResults}, nil } func (a *APIController) apiApply(request *APIRequest, ctx *command.Context) (*command.Result, error) { - cmds, err := request.getCommands(ctx, a.ProjectCommandBuilder.BuildApplyCommands) + cmds, cc, err := request.getCommands(ctx, a.ProjectCommandBuilder.BuildApplyCommands) if err != nil { return nil, err } var projectResults []command.ProjectResult - for _, cmd := range cmds { + for i, cmd := range cmds { + err = a.PreWorkflowHooksCommandRunner.RunPreHooks(ctx, cc[i]) + if err != nil { + ctx.Log.Err("Error running pre-workflow hooks %s.", err) + if a.FailOnPreWorkflowHookError { + return nil, err + } + } + res := a.ProjectApplyCommandRunner.Apply(cmd) projectResults = append(projectResults, res) + + err = a.PostWorkflowHooksCommandRunner.RunPostHooks(ctx, cc[i]) + if err != nil { + ctx.Log.Err("Error running post-workflow hooks %s.", err) + } } return &command.Result{ProjectResults: projectResults}, nil } @@ -197,7 +226,7 @@ func (a *APIController) apiParseAndValidate(r *http.Request) (*APIRequest, *comm if err != nil { return nil, nil, http.StatusBadRequest, err } - cloneURL, err := a.VCSClient.GetCloneURL(VCSHostType, request.Repository) + cloneURL, err := a.VCSClient.GetCloneURL(a.Logger, VCSHostType, request.Repository) if err != nil { return nil, nil, http.StatusInternalServerError, err } @@ -223,6 +252,7 @@ func (a *APIController) apiParseAndValidate(r *http.Request) (*APIRequest, *comm }, Scope: a.Scope, Log: a.Logger, + API: true, }, http.StatusOK, nil } diff --git a/server/controllers/api_controller_test.go b/server/controllers/api_controller_test.go index 1f2370ef08..3b3aa520aa 100644 --- 
a/server/controllers/api_controller_test.go +++ b/server/controllers/api_controller_test.go @@ -86,17 +86,27 @@ func setup(t *testing.T) (controllers.APIController, *MockProjectCommandBuilder, ApplySuccess: "success", }) + preWorkflowHooksCommandRunner := NewMockPreWorkflowHooksCommandRunner() + + When(preWorkflowHooksCommandRunner.RunPreHooks(Any[*command.Context](), Any[*events.CommentCommand]())).ThenReturn(nil) + + postWorkflowHooksCommandRunner := NewMockPostWorkflowHooksCommandRunner() + + When(postWorkflowHooksCommandRunner.RunPostHooks(Any[*command.Context](), Any[*events.CommentCommand]())).ThenReturn(nil) + ac := controllers.APIController{ - APISecret: []byte(atlantisToken), - Locker: locker, - Logger: logger, - Scope: scope, - Parser: parser, - ProjectCommandBuilder: projectCommandBuilder, - ProjectPlanCommandRunner: projectCommandRunner, - ProjectApplyCommandRunner: projectCommandRunner, - VCSClient: vcsClient, - RepoAllowlistChecker: repoAllowlistChecker, + APISecret: []byte(atlantisToken), + Locker: locker, + Logger: logger, + Scope: scope, + Parser: parser, + ProjectCommandBuilder: projectCommandBuilder, + ProjectPlanCommandRunner: projectCommandRunner, + ProjectApplyCommandRunner: projectCommandRunner, + PreWorkflowHooksCommandRunner: preWorkflowHooksCommandRunner, + PostWorkflowHooksCommandRunner: postWorkflowHooksCommandRunner, + VCSClient: vcsClient, + RepoAllowlistChecker: repoAllowlistChecker, } return ac, projectCommandBuilder, projectCommandRunner } diff --git a/server/controllers/events/events_controller.go b/server/controllers/events/events_controller.go index 932001ffc2..b0257eebea 100644 --- a/server/controllers/events/events_controller.go +++ b/server/controllers/events/events_controller.go @@ -14,12 +14,13 @@ package events import ( + "encoding/json" "fmt" "io" "net/http" "strings" - "github.com/google/go-github/v58/github" + "github.com/google/go-github/v65/github" "github.com/mcdafydd/go-azuredevops/azuredevops" "github.com/microcosm-cc/bluemonday" "github.com/pkg/errors" @@ -28,6 +29,7 @@ import ( "github.com/runatlantis/atlantis/server/events/vcs" "github.com/runatlantis/atlantis/server/events/vcs/bitbucketcloud" "github.com/runatlantis/atlantis/server/events/vcs/bitbucketserver" + "github.com/runatlantis/atlantis/server/events/vcs/gitea" "github.com/runatlantis/atlantis/server/logging" tally "github.com/uber-go/tally/v4" gitlab "github.com/xanzy/go-gitlab" @@ -37,6 +39,11 @@ const githubHeader = "X-Github-Event" const gitlabHeader = "X-Gitlab-Event" const azuredevopsHeader = "Request-Id" +const giteaHeader = "X-Gitea-Event" +const giteaEventTypeHeader = "X-Gitea-Event-Type" +const giteaSignatureHeader = "X-Gitea-Signature" +const giteaRequestIDHeader = "X-Gitea-Delivery" + // bitbucketEventTypeHeader is the same in both cloud and server. const bitbucketEventTypeHeader = "X-Event-Key" const bitbucketCloudRequestIDHeader = "X-Request-UUID" @@ -91,11 +98,20 @@ type VCSEventsController struct { // Azure DevOps Team Project. If empty, no request validation is done. AzureDevopsWebhookBasicPassword []byte AzureDevopsRequestValidator AzureDevopsRequestValidator + GiteaWebhookSecret []byte } // Post handles POST webhook requests. 
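// Dispatch is keyed off each host's characteristic header (X-Gitea-Event,
// X-Github-Event, X-Gitlab-Event, Request-Id for Azure DevOps); the first
// header present decides which handler runs.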
func (e *VCSEventsController) Post(w http.ResponseWriter, r *http.Request) { - if r.Header.Get(githubHeader) != "" { + if r.Header.Get(giteaHeader) != "" { + if !e.supportsHost(models.Gitea) { + e.respond(w, logging.Debug, http.StatusBadRequest, "Ignoring request since not configured to support Gitea") + return + } + e.Logger.Debug("handling Gitea post") + e.handleGiteaPost(w, r) + return + } else if r.Header.Get(githubHeader) != "" { if !e.supportsHost(models.Github) { e.respond(w, logging.Debug, http.StatusBadRequest, "Ignoring request since not configured to support GitHub") return @@ -158,7 +174,7 @@ func (e *VCSEventsController) handleGithubPost(w http.ResponseWriter, r *http.Re // Validate the request against the optional webhook secret. payload, err := e.GithubRequestValidator.Validate(r, e.GithubWebhookSecret) if err != nil { - e.respond(w, logging.Warn, http.StatusBadRequest, err.Error()) + e.respond(w, logging.Warn, http.StatusBadRequest, "%s", err.Error()) return } @@ -250,7 +266,7 @@ func (e *VCSEventsController) handleBitbucketServerPost(w http.ResponseWriter, r } if len(e.BitbucketWebhookSecret) > 0 { if err := bitbucketserver.ValidateSignature(body, sig, e.BitbucketWebhookSecret); err != nil { - e.respond(w, logging.Warn, http.StatusBadRequest, errors.Wrap(err, "request did not pass validation").Error()) + e.respond(w, logging.Warn, http.StatusBadRequest, "%s", errors.Wrap(err, "request did not pass validation").Error()) return } } @@ -272,7 +288,7 @@ func (e *VCSEventsController) handleAzureDevopsPost(w http.ResponseWriter, r *ht // Validate the request against the optional basic auth username and password. payload, err := e.AzureDevopsRequestValidator.Validate(r, e.AzureDevopsWebhookBasicUser, e.AzureDevopsWebhookBasicPassword) if err != nil { - e.respond(w, logging.Warn, http.StatusUnauthorized, err.Error()) + e.respond(w, logging.Warn, http.StatusUnauthorized, "%s", err.Error()) return } e.Logger.Debug("request valid") @@ -295,6 +311,91 @@ func (e *VCSEventsController) handleAzureDevopsPost(w http.ResponseWriter, r *ht } } +func (e *VCSEventsController) handleGiteaPost(w http.ResponseWriter, r *http.Request) { + signature := r.Header.Get(giteaSignatureHeader) + eventType := r.Header.Get(giteaEventTypeHeader) + reqID := r.Header.Get(giteaRequestIDHeader) + + defer r.Body.Close() // Ensure the request body is closed + + body, err := io.ReadAll(r.Body) + if err != nil { + e.respond(w, logging.Error, http.StatusBadRequest, "Unable to read body: %s %s=%s", err, "X-Gitea-Delivery", reqID) + return + } + + if len(e.GiteaWebhookSecret) > 0 { + if err := gitea.ValidateSignature(body, signature, e.GiteaWebhookSecret); err != nil { + e.respond(w, logging.Warn, http.StatusBadRequest, "%s", errors.Wrap(err, "request did not pass validation").Error()) + return + } + } + + // Log the event type for debugging purposes + e.Logger.Debug("Received Gitea event %s with ID %s", eventType, reqID) + + // Depending on the event type, handle the event appropriately + switch eventType { + case "pull_request_comment": + e.HandleGiteaPullRequestCommentEvent(w, body, reqID) + case "pull_request": + e.Logger.Debug("Handling as pull_request") + e.handleGiteaPullRequestEvent(w, body, reqID) + // Add other case handlers as necessary + default: + e.respond(w, logging.Debug, http.StatusOK, "Ignoring unsupported Gitea event type: %s %s=%s", eventType, "X-Gitea-Delivery", reqID) + } +} + +func (e *VCSEventsController) handleGiteaPullRequestEvent(w http.ResponseWriter, body []byte, reqID string) { + 
e.Logger.Debug("Entering handleGiteaPullRequestEvent") + // Attempt to unmarshal the incoming body into the Gitea PullRequest struct + var payload gitea.GiteaWebhookPayload + if err := json.Unmarshal(body, &payload); err != nil { + e.Logger.Err("Failed to unmarshal Gitea webhook payload: %v", err) + e.respond(w, logging.Error, http.StatusBadRequest, "Failed to parse request body") + return + } + + e.Logger.Debug("Successfully unmarshaled Gitea event") + + // Use the parser function to convert into Atlantis models + pull, pullEventType, baseRepo, headRepo, user, err := e.Parser.ParseGiteaPullRequestEvent(payload.PullRequest) + if err != nil { + e.Logger.Err("Failed to parse Gitea pull request event: %v", err) + e.respond(w, logging.Error, http.StatusInternalServerError, "Failed to process event") + return + } + + e.Logger.Debug("Parsed Gitea event into Atlantis models successfully") + + logger := e.Logger.With("gitea-request-id", reqID) + logger.Debug("Identified Gitea event as type", "type", pullEventType) + + // Call a generic handler for pull request events + response := e.handlePullRequestEvent(logger, baseRepo, headRepo, pull, user, pullEventType) + + e.respond(w, logging.Debug, http.StatusOK, "%s", response.body) +} + +// HandleGiteaPullRequestCommentEvent handles comment events from Gitea where Atlantis commands can come from. +func (e *VCSEventsController) HandleGiteaPullRequestCommentEvent(w http.ResponseWriter, body []byte, reqID string) { + var event gitea.GiteaIssueCommentPayload + if err := json.Unmarshal(body, &event); err != nil { + e.Logger.Err("Failed to unmarshal Gitea comment payload: %v", err) + e.respond(w, logging.Error, http.StatusBadRequest, "Failed to parse request body") + return + } + e.Logger.Debug("Successfully unmarshaled Gitea comment event") + + baseRepo, user, pullNum, _ := e.Parser.ParseGiteaIssueCommentEvent(event) + // Since we're lacking headRepo and maybePull details, we'll pass nil + // This follows the same approach as the GitHub client for handling comment events without full PR details + response := e.handleCommentEvent(e.Logger, baseRepo, nil, nil, user, pullNum, event.Comment.Body, event.Comment.ID, models.Gitea) + + e.respond(w, logging.Debug, http.StatusOK, "%s", response.body) +} + // HandleGithubCommentEvent handles comment events from GitHub where Atlantis // commands can come from. It's exported to make testing easier. func (e *VCSEventsController) HandleGithubCommentEvent(event *github.IssueCommentEvent, githubReqID string, logger logging.SimpleLogging) HTTPResponse { @@ -304,7 +405,7 @@ func (e *VCSEventsController) HandleGithubCommentEvent(event *github.IssueCommen } } - baseRepo, user, pullNum, err := e.Parser.ParseGithubIssueCommentEvent(event) + baseRepo, user, pullNum, err := e.Parser.ParseGithubIssueCommentEvent(logger, event) wrapped := errors.Wrapf(err, "Failed parsing event: %s", githubReqID) if err != nil { @@ -343,7 +444,7 @@ func (e *VCSEventsController) HandleBitbucketCloudCommentEvent(w http.ResponseWr code = resp.err.code msg = resp.err.err.Error() } - e.respond(w, lvl, code, msg) + e.respond(w, lvl, code, "%s", msg) } // HandleBitbucketServerCommentEvent handles comment events from Bitbucket. 
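The Gitea path above only accepts a payload when X-Gitea-Signature carries a hex HMAC-SHA256 of the raw request body under the shared webhook secret. A minimal local smoke test of that contract might look like the following sketch (payload.json, the mysecret value, and the URL are assumptions; 4141 is Atlantis's default port and /events its webhook endpoint):

#!/bin/sh
# Sign the payload the same way gitea.ValidateSignature will verify it.
SECRET='mysecret'
SIG=$(openssl dgst -sha256 -hmac "$SECRET" < payload.json | awk '{print $NF}')
# Post routes on X-Gitea-Event being present; the event switch reads X-Gitea-Event-Type.
curl -s -X POST "http://localhost:4141/events" \
  -H "X-Gitea-Event: pull_request_comment" \
  -H "X-Gitea-Event-Type: pull_request_comment" \
  -H "X-Gitea-Delivery: local-test-1" \
  -H "X-Gitea-Signature: $SIG" \
  -H "Content-Type: application/json" \
  --data-binary @payload.json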
@@ -364,7 +465,7 @@ func (e *VCSEventsController) HandleBitbucketServerCommentEvent(w http.ResponseW code = resp.err.code msg = resp.err.err.Error() } - e.respond(w, lvl, code, msg) + e.respond(w, lvl, code, "%s", msg) } func (e *VCSEventsController) handleBitbucketCloudPullRequestEvent(w http.ResponseWriter, eventType string, body []byte, reqID string) { @@ -387,7 +488,7 @@ func (e *VCSEventsController) handleBitbucketCloudPullRequestEvent(w http.Respon code = resp.err.code msg = resp.err.err.Error() } - e.respond(w, lvl, code, msg) + e.respond(w, lvl, code, "%s", msg) } func (e *VCSEventsController) handleBitbucketServerPullRequestEvent(w http.ResponseWriter, eventType string, body []byte, reqID string) { @@ -409,14 +510,14 @@ func (e *VCSEventsController) handleBitbucketServerPullRequestEvent(w http.Respo code = resp.err.code msg = resp.err.err.Error() } - e.respond(w, lvl, code, msg) + e.respond(w, lvl, code, "%s", msg) } // HandleGithubPullRequestEvent will delete any locks associated with the pull // request if the event is a pull request closed event. It's exported to make // testing easier. func (e *VCSEventsController) HandleGithubPullRequestEvent(logger logging.SimpleLogging, pullEvent *github.PullRequestEvent, githubReqID string) HTTPResponse { - pull, pullEventType, baseRepo, headRepo, user, err := e.Parser.ParseGithubPullEvent(pullEvent) + pull, pullEventType, baseRepo, headRepo, user, err := e.Parser.ParseGithubPullEvent(logger, pullEvent) if err != nil { wrapped := errors.Wrapf(err, "Error parsing pull data: %s %s", err, githubReqID) return HTTPResponse{ @@ -472,7 +573,7 @@ func (e *VCSEventsController) handlePullRequestEvent(logger logging.SimpleLoggin } case models.ClosedPullEvent: // If the pull request was closed, we delete locks. - if err := e.PullCleaner.CleanUpPull(baseRepo, pull); err != nil { + if err := e.PullCleaner.CleanUpPull(logger, baseRepo, pull); err != nil { return HTTPResponse{ body: err.Error(), err: HTTPError{ @@ -498,7 +599,7 @@ func (e *VCSEventsController) handlePullRequestEvent(logger logging.SimpleLoggin func (e *VCSEventsController) handleGitlabPost(w http.ResponseWriter, r *http.Request) { event, err := e.GitlabRequestParserValidator.ParseAndValidate(r, e.GitlabWebhookSecret) if err != nil { - e.respond(w, logging.Warn, http.StatusBadRequest, err.Error()) + e.respond(w, logging.Warn, http.StatusBadRequest, "%s", err.Error()) return } e.Logger.Debug("request valid") @@ -539,10 +640,15 @@ func (e *VCSEventsController) HandleGitlabCommentEvent(w http.ResponseWriter, ev code = resp.err.code msg = resp.err.err.Error() } - e.respond(w, lvl, code, msg) + e.respond(w, lvl, code, "%s", msg) } func (e *VCSEventsController) handleCommentEvent(logger logging.SimpleLogging, baseRepo models.Repo, maybeHeadRepo *models.Repo, maybePull *models.PullRequest, user models.User, pullNum int, comment string, commentID int64, vcsHost models.VCSHostType) HTTPResponse { + logger = logger.WithHistory( + "repo", baseRepo.FullName, + "pull", pullNum, + ) + parseResult := e.CommentParser.Parse(comment, vcsHost) if parseResult.Ignore { truncated := comment @@ -573,9 +679,9 @@ func (e *VCSEventsController) handleCommentEvent(logger logging.SimpleLogging, b } } - // It's a comment we're gonna react to, so add a reaction. + // It's a comment we're going to react to so add a reaction. 
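+ // (A failed reaction is only logged below rather than failing the request,
+ // so a missing emoji can't block the command itself.)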
if e.EmojiReaction != "" { - err := e.VCSClient.ReactToComment(baseRepo, pullNum, commentID, e.EmojiReaction) + err := e.VCSClient.ReactToComment(logger, baseRepo, pullNum, commentID, e.EmojiReaction) if err != nil { logger.Warn("Failed to react to comment: %s", err) } @@ -586,7 +692,7 @@ func (e *VCSEventsController) handleCommentEvent(logger logging.SimpleLogging, b // We do this here rather than earlier because we need access to the pull // variable to comment back on the pull request. if parseResult.CommentResponse != "" { - if err := e.VCSClient.CreateComment(baseRepo, pullNum, parseResult.CommentResponse, ""); err != nil { + if err := e.VCSClient.CreateComment(logger, baseRepo, pullNum, parseResult.CommentResponse, ""); err != nil { logger.Err("unable to comment on pull request: %s", err) } return HTTPResponse{ @@ -636,7 +742,7 @@ func (e *VCSEventsController) HandleGitlabMergeRequestEvent(w http.ResponseWrite code = resp.err.code msg = resp.err.err.Error() } - e.respond(w, lvl, code, msg) + e.respond(w, lvl, code, "%s", msg) } // HandleAzureDevopsPullRequestCommentedEvent handles comment events from Azure DevOps where Atlantis @@ -690,7 +796,7 @@ func (e *VCSEventsController) HandleAzureDevopsPullRequestCommentedEvent(w http. code = resp.err.code msg = resp.err.err.Error() } - e.respond(w, lvl, code, msg) + e.respond(w, lvl, code, "%s", msg) } // HandleAzureDevopsPullRequestEvent will delete any locks associated with the pull @@ -741,7 +847,7 @@ func (e *VCSEventsController) HandleAzureDevopsPullRequestEvent(w http.ResponseW code = resp.err.code msg = resp.err.err.Error() } - e.respond(w, lvl, code, msg) + e.respond(w, lvl, code, "%s", msg) } // supportsHost returns true if h is in e.SupportedVCSHosts and false otherwise. @@ -769,7 +875,7 @@ func (e *VCSEventsController) commentNotAllowlisted(baseRepo models.Repo, pullNu } errMsg := "```\nError: This repo is not allowlisted for Atlantis.\n```" - if err := e.VCSClient.CreateComment(baseRepo, pullNum, errMsg, ""); err != nil { + if err := e.VCSClient.CreateComment(e.Logger, baseRepo, pullNum, errMsg, ""); err != nil { e.Logger.Err("unable to comment on pull request: %s", err) } } diff --git a/server/controllers/events/events_controller_e2e_test.go b/server/controllers/events/events_controller_e2e_test.go index aaf7f8d8ab..6b985fd9b8 100644 --- a/server/controllers/events/events_controller_e2e_test.go +++ b/server/controllers/events/events_controller_e2e_test.go @@ -13,7 +13,7 @@ import ( "strings" "testing" - "github.com/google/go-github/v58/github" + "github.com/google/go-github/v65/github" "github.com/hashicorp/go-version" . 
"github.com/petergtz/pegomock/v4" @@ -26,7 +26,9 @@ import ( "github.com/runatlantis/atlantis/server/core/runtime" runtimemocks "github.com/runatlantis/atlantis/server/core/runtime/mocks" "github.com/runatlantis/atlantis/server/core/runtime/policy" + mock_policy "github.com/runatlantis/atlantis/server/core/runtime/policy/mocks" "github.com/runatlantis/atlantis/server/core/terraform" + terraform_mocks "github.com/runatlantis/atlantis/server/core/terraform/mocks" "github.com/runatlantis/atlantis/server/events" "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/mocks" @@ -53,12 +55,8 @@ var mockPreWorkflowHookRunner *runtimemocks.MockPreWorkflowHookRunner var mockPostWorkflowHookRunner *runtimemocks.MockPostWorkflowHookRunner -func (m *NoopTFDownloader) GetFile(_, _ string) error { - return nil -} - -func (m *NoopTFDownloader) GetAny(_, _ string) error { - return nil +func (m *NoopTFDownloader) Install(_ string, _ string, _ *version.Version) (string, error) { + return "", nil } type LocalConftestCache struct { @@ -643,8 +641,10 @@ func TestGitHubWorkflow(t *testing.T) { // Setup test dependencies. w := httptest.NewRecorder() - When(githubGetter.GetPullRequest(Any[models.Repo](), Any[int]())).ThenReturn(GitHubPullRequestParsed(headSHA), nil) - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) + When(githubGetter.GetPullRequest( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[int]())).ThenReturn(GitHubPullRequestParsed(headSHA), nil) + When(vcsClient.GetModifiedFiles( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) // First, send the open pull request event which triggers autoplan. pullOpenedReq := GitHubPullRequestOpenedEvent(t, headSHA) @@ -707,7 +707,8 @@ func TestGitHubWorkflow(t *testing.T) { expNumReplies++ } - _, _, actReplies, _ := vcsClient.VerifyWasCalled(Times(expNumReplies)).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()).GetAllCapturedArguments() + _, _, _, actReplies, _ := vcsClient.VerifyWasCalled(Times(expNumReplies)).CreateComment( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()).GetAllCapturedArguments() Assert(t, len(c.ExpReplies) == len(actReplies), "missing expected replies, got %d but expected %d", len(actReplies), len(c.ExpReplies)) for i, expReply := range c.ExpReplies { assertCommentEquals(t, expReply, actReplies[i], c.RepoDir, c.ExpParallel) @@ -715,9 +716,9 @@ func TestGitHubWorkflow(t *testing.T) { if c.ExpAutomerge { // Verify that the merge API call was made. - vcsClient.VerifyWasCalledOnce().MergePull(Any[models.PullRequest](), Any[models.PullRequestOptions]()) + vcsClient.VerifyWasCalledOnce().MergePull(Any[logging.SimpleLogging](), Any[models.PullRequest](), Any[models.PullRequestOptions]()) } else { - vcsClient.VerifyWasCalled(Never()).MergePull(Any[models.PullRequest](), Any[models.PullRequestOptions]()) + vcsClient.VerifyWasCalled(Never()).MergePull(Any[logging.SimpleLogging](), Any[models.PullRequest](), Any[models.PullRequestOptions]()) } }) } @@ -819,8 +820,8 @@ func TestSimpleWorkflow_terraformLockFile(t *testing.T) { // Setup test dependencies. 
w := httptest.NewRecorder() - When(githubGetter.GetPullRequest(Any[models.Repo](), Any[int]())).ThenReturn(GitHubPullRequestParsed(headSHA), nil) - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Any[models.Repo](), Any[int]())).ThenReturn(GitHubPullRequestParsed(headSHA), nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) // First, send the open pull request event which triggers autoplan. pullOpenedReq := GitHubPullRequestOpenedEvent(t, headSHA) @@ -880,7 +881,8 @@ func TestSimpleWorkflow_terraformLockFile(t *testing.T) { // and apply have 1 for each comment plus one for the locks deleted at the // end. - _, _, actReplies, _ := vcsClient.VerifyWasCalled(Times(2)).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()).GetAllCapturedArguments() + _, _, _, actReplies, _ := vcsClient.VerifyWasCalled(Times(2)).CreateComment( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()).GetAllCapturedArguments() Assert(t, len(c.ExpReplies) == len(actReplies), "missing expected replies, got %d but expected %d", len(actReplies), len(c.ExpReplies)) for i, expReply := range c.ExpReplies { assertCommentEquals(t, expReply, actReplies[i], c.RepoDir, false) @@ -1188,12 +1190,16 @@ func TestGitHubWorkflowWithPolicyCheck(t *testing.T) { // Setup test dependencies. w := httptest.NewRecorder() - When(vcsClient.PullIsMergeable(Any[models.Repo](), Any[models.PullRequest](), Eq("atlantis-test"))).ThenReturn(true, nil) - When(vcsClient.PullIsApproved(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(models.ApprovalStatus{ + When(vcsClient.PullIsMergeable( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Eq("atlantis-test"), Eq([]string{}))).ThenReturn(true, nil) + When(vcsClient.PullIsApproved( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(models.ApprovalStatus{ IsApproved: true, }, nil) - When(githubGetter.GetPullRequest(Any[models.Repo](), Any[int]())).ThenReturn(GitHubPullRequestParsed(headSHA), nil) - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) + When(githubGetter.GetPullRequest( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[int]())).ThenReturn(GitHubPullRequestParsed(headSHA), nil) + When(vcsClient.GetModifiedFiles( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) // First, send the open pull request event which triggers autoplan. 
pullOpenedReq := GitHubPullRequestOpenedEvent(t, headSHA) @@ -1244,7 +1250,8 @@ func TestGitHubWorkflowWithPolicyCheck(t *testing.T) { if !c.ExpPolicyChecks { expNumReplies-- } - _, _, actReplies, _ := vcsClient.VerifyWasCalled(Times(expNumReplies)).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()).GetAllCapturedArguments() + _, _, _, actReplies, _ := vcsClient.VerifyWasCalled(Times(expNumReplies)).CreateComment( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()).GetAllCapturedArguments() Assert(t, len(c.ExpReplies) == len(actReplies), "missing expected replies, got %d but expected %d", len(actReplies), len(c.ExpReplies)) for i, expReply := range c.ExpReplies { @@ -1253,9 +1260,9 @@ func TestGitHubWorkflowWithPolicyCheck(t *testing.T) { if c.ExpAutomerge { // Verify that the merge API call was made. - vcsClient.VerifyWasCalledOnce().MergePull(Any[models.PullRequest](), Any[models.PullRequestOptions]()) + vcsClient.VerifyWasCalledOnce().MergePull(Any[logging.SimpleLogging](), Any[models.PullRequest](), Any[models.PullRequestOptions]()) } else { - vcsClient.VerifyWasCalled(Never()).MergePull(Any[models.PullRequest](), Any[models.PullRequestOptions]()) + vcsClient.VerifyWasCalled(Never()).MergePull(Any[logging.SimpleLogging](), Any[models.PullRequest](), Any[models.PullRequestOptions]()) } }) } @@ -1308,7 +1315,11 @@ func setupE2E(t *testing.T, repoDir string, opt setupOption) (events_controllers ExecutableName: "atlantis", AllowCommands: allowCommands, } - terraformClient, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "", "default-tf-version", "https://releases.hashicorp.com", &NoopTFDownloader{}, true, false, projectCmdOutputHandler) + + mockDownloader := terraform_mocks.NewMockDownloader() + distribution := terraform.NewDistributionTerraformWithDownloader(mockDownloader) + + terraformClient, err := terraform.NewClient(logger, distribution, binDir, cacheDir, "", "", "", "default-tf-version", "https://releases.hashicorp.com", true, false, projectCmdOutputHandler) Ok(t, err) boltdb, err := db.New(dataDir) Ok(t, err) @@ -1324,7 +1335,6 @@ func setupE2E(t *testing.T, repoDir string, opt setupOption) (events_controllers workingDir := &events.FileWorkspace{ DataDir: dataDir, TestingOverrideHeadCloneURL: "override-me", - Logger: logger, } var preWorkflowHooks []*valid.WorkflowHook if !opt.disablePreWorkflowHooks { @@ -1416,7 +1426,6 @@ func setupE2E(t *testing.T, repoDir string, opt setupOption) (events_controllers false, "auto", statsScope, - logger, terraformClient, ) @@ -1424,7 +1433,7 @@ func setupE2E(t *testing.T, repoDir string, opt setupOption) (events_controllers Ok(t, err) - conftextExec := policy.NewConfTestExecutorWorkflow(logger, binDir, &NoopTFDownloader{}) + conftextExec := policy.NewConfTestExecutorWorkflow(logger, binDir, mock_policy.NewMockDownloader()) // swapping out version cache to something that always returns local conftest // binary @@ -1496,7 +1505,7 @@ func setupE2E(t *testing.T, repoDir string, opt setupOption) (events_controllers userConfig.QuietPolicyChecks, ) - e2ePullReqStatusFetcher := vcs.NewPullReqStatusFetcher(e2eVCSClient, "atlantis-test") + e2ePullReqStatusFetcher := vcs.NewPullReqStatusFetcher(e2eVCSClient, "atlantis-test", []string{}) planCommandRunner := events.NewPlanCommandRunner( false, @@ -1874,4 +1883,7 @@ func ensureRunning014(t *testing.T) { // // Terraform v0.11.10 // => 0.11.10 -var versionRegex = regexp.MustCompile("Terraform v(.*?)(\\s.*)?\n") +// +// OpenTofu 
v1.0.0 +// => 1.0.0 +var versionRegex = regexp.MustCompile("(?:Terraform|OpenTofu) v(.*?)(\\s.*)?\n") diff --git a/server/controllers/events/events_controller_test.go b/server/controllers/events/events_controller_test.go index ad23ea7ddc..bbf008cafb 100644 --- a/server/controllers/events/events_controller_test.go +++ b/server/controllers/events/events_controller_test.go @@ -25,7 +25,7 @@ import ( "strings" "testing" - "github.com/google/go-github/v58/github" + "github.com/google/go-github/v65/github" "github.com/mcdafydd/go-azuredevops/azuredevops" . "github.com/petergtz/pegomock/v4" events_controllers "github.com/runatlantis/atlantis/server/controllers/events" @@ -42,6 +42,7 @@ import ( ) const githubHeader = "X-Github-Event" +const giteaHeader = "X-Gitea-Event" const gitlabHeader = "X-Gitlab-Event" const azuredevopsHeader = "Request-Id" @@ -68,6 +69,17 @@ func TestPost_UnsupportedVCSGithub(t *testing.T) { ResponseContains(t, w, http.StatusBadRequest, "Ignoring request since not configured to support GitHub") } +func TestPost_UnsupportedVCSGitea(t *testing.T) { + t.Log("when the request is for an unsupported vcs a 400 is returned") + e, _, _, _, _, _, _, _, _ := setup(t) + e.SupportedVCSHosts = nil + req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil)) + req.Header.Set(giteaHeader, "value") + w := httptest.NewRecorder() + e.Post(w, req) + ResponseContains(t, w, http.StatusBadRequest, "Ignoring request since not configured to support Gitea") +} + func TestPost_UnsupportedVCSGitlab(t *testing.T) { t.Log("when the request is for an unsupported vcs a 400 is returned") e, _, _, _, _, _, _, _, _ := setup(t) @@ -90,6 +102,17 @@ func TestPost_InvalidGithubSecret(t *testing.T) { ResponseContains(t, w, http.StatusBadRequest, "err") } +func TestPost_InvalidGiteaSecret(t *testing.T) { + t.Log("when the gitea payload can't be validated a 400 is returned") + e, v, _, _, _, _, _, _, _ := setup(t) + w := httptest.NewRecorder() + req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil)) + req.Header.Set(giteaHeader, "value") + When(v.Validate(req, secret)).ThenReturn(nil, errors.New("err")) + e.Post(w, req) + ResponseContains(t, w, http.StatusBadRequest, "request did not pass validation") +} + func TestPost_InvalidGitlabSecret(t *testing.T) { t.Log("when the gitlab payload can't be validated a 400 is returned") e, _, gl, _, _, _, _, _, _ := setup(t) @@ -112,6 +135,18 @@ func TestPost_UnsupportedGithubEvent(t *testing.T) { ResponseContains(t, w, http.StatusOK, "Ignoring unsupported event") } +func TestPost_UnsupportedGiteaEvent(t *testing.T) { + t.Log("when the event type is an unsupported gitea event we ignore it") + e, v, _, _, _, _, _, _, _ := setup(t) + w := httptest.NewRecorder() + req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil)) + req.Header.Set(giteaHeader, "value") + e.GiteaWebhookSecret = nil + When(v.Validate(req, nil)).ThenReturn([]byte(`{"not an event": ""}`), nil) + e.Post(w, req) + ResponseContains(t, w, http.StatusOK, "Ignoring unsupported Gitea event") +} + func TestPost_UnsupportedGitlabEvent(t *testing.T) { t.Log("when the event type is an unsupported gitlab event we ignore it") e, _, gl, _, _, _, _, _, _ := setup(t) @@ -155,7 +190,7 @@ func TestPost_GithubInvalidComment(t *testing.T) { req.Header.Set(githubHeader, "issue_comment") event := `{"action": "created"}` When(v.Validate(req, secret)).ThenReturn([]byte(event), nil) - When(p.ParseGithubIssueCommentEvent(Any[*github.IssueCommentEvent]())).ThenReturn(models.Repo{}, models.User{}, 1, errors.New("err")) + 
When(p.ParseGithubIssueCommentEvent(Any[logging.SimpleLogging](), Any[*github.IssueCommentEvent]())).ThenReturn(models.Repo{}, models.User{}, 1, errors.New("err")) w := httptest.NewRecorder() e.Post(w, req) ResponseContains(t, w, http.StatusBadRequest, "Failed parsing event") @@ -180,12 +215,12 @@ func TestPost_GithubCommentInvalidCommand(t *testing.T) { req.Header.Set(githubHeader, "issue_comment") event := `{"action": "created"}` When(v.Validate(req, secret)).ThenReturn([]byte(event), nil) - When(p.ParseGithubIssueCommentEvent(Any[*github.IssueCommentEvent]())).ThenReturn(models.Repo{}, models.User{}, 1, nil) + When(p.ParseGithubIssueCommentEvent(Any[logging.SimpleLogging](), Any[*github.IssueCommentEvent]())).ThenReturn(models.Repo{}, models.User{}, 1, nil) When(cp.Parse("", models.Github)).ThenReturn(events.CommentParseResult{Ignore: true}) w := httptest.NewRecorder() e.Post(w, req) ResponseContains(t, w, http.StatusOK, "Ignoring non-command comment: \"\"") - vcsClient.VerifyWasCalled(Never()).ReactToComment(models.Repo{}, 1, 1, "eyes") + vcsClient.VerifyWasCalled(Never()).ReactToComment(Any[logging.SimpleLogging](), Eq(models.Repo{}), Eq(1), Eq(int64(1)), Eq("eyes")) } func TestPost_GitlabCommentNotAllowlisted(t *testing.T) { @@ -211,12 +246,15 @@ func TestPost_GitlabCommentNotAllowlisted(t *testing.T) { w := httptest.NewRecorder() e.Post(w, req) - Equals(t, http.StatusForbidden, w.Result().StatusCode) - body, _ := io.ReadAll(w.Result().Body) + resp := w.Result() + defer resp.Body.Close() + Equals(t, http.StatusForbidden, resp.StatusCode) + body, _ := io.ReadAll(resp.Body) exp := "Repo not allowlisted" Assert(t, strings.Contains(string(body), exp), "exp %q to be contained in %q", exp, string(body)) expRepo, _ := models.NewRepo(models.Gitlab, "gitlabhq/gitlab-test", "https://example.com/gitlabhq/gitlab-test.git", "", "") - vcsClient.VerifyWasCalledOnce().CreateComment(expRepo, 1, "```\nError: This repo is not allowlisted for Atlantis.\n```", "") + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(expRepo), Eq(1), Eq("```\nError: This repo is not allowlisted for Atlantis.\n```"), Eq("")) } func TestPost_GitlabCommentNotAllowlistedWithSilenceErrors(t *testing.T) { @@ -243,11 +281,13 @@ func TestPost_GitlabCommentNotAllowlistedWithSilenceErrors(t *testing.T) { w := httptest.NewRecorder() e.Post(w, req) - Equals(t, http.StatusForbidden, w.Result().StatusCode) - body, _ := io.ReadAll(w.Result().Body) + resp := w.Result() + defer resp.Body.Close() + Equals(t, http.StatusForbidden, resp.StatusCode) + body, _ := io.ReadAll(resp.Body) exp := "Repo not allowlisted" Assert(t, strings.Contains(string(body), exp), "exp %q to be contained in %q", exp, string(body)) - vcsClient.VerifyWasCalled(Never()).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()) + vcsClient.VerifyWasCalled(Never()).CreateComment(Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) } @@ -275,12 +315,15 @@ func TestPost_GithubCommentNotAllowlisted(t *testing.T) { w := httptest.NewRecorder() e.Post(w, req) - Equals(t, http.StatusForbidden, w.Result().StatusCode) - body, _ := io.ReadAll(w.Result().Body) + resp := w.Result() + defer resp.Body.Close() + Equals(t, http.StatusForbidden, resp.StatusCode) + body, _ := io.ReadAll(resp.Body) exp := "Repo not allowlisted" Assert(t, strings.Contains(string(body), exp), "exp %q to be contained in %q", exp, string(body)) expRepo, _ := models.NewRepo(models.Github, "baxterthehacker/public-repo", 
"https://github.com/baxterthehacker/public-repo.git", "", "") - vcsClient.VerifyWasCalledOnce().CreateComment(expRepo, 2, "```\nError: This repo is not allowlisted for Atlantis.\n```", "") + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(expRepo), Eq(2), Eq("```\nError: This repo is not allowlisted for Atlantis.\n```"), Eq("")) } func TestPost_GithubCommentNotAllowlistedWithSilenceErrors(t *testing.T) { @@ -308,11 +351,13 @@ func TestPost_GithubCommentNotAllowlistedWithSilenceErrors(t *testing.T) { w := httptest.NewRecorder() e.Post(w, req) - Equals(t, http.StatusForbidden, w.Result().StatusCode) - body, _ := io.ReadAll(w.Result().Body) + resp := w.Result() + defer resp.Body.Close() + Equals(t, http.StatusForbidden, resp.StatusCode) + body, _ := io.ReadAll(resp.Body) exp := "Repo not allowlisted" Assert(t, strings.Contains(string(body), exp), "exp %q to be contained in %q", exp, string(body)) - vcsClient.VerifyWasCalled(Never()).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()) + vcsClient.VerifyWasCalled(Never()).CreateComment(Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) } func TestPost_GitlabCommentResponse(t *testing.T) { @@ -324,7 +369,7 @@ func TestPost_GitlabCommentResponse(t *testing.T) { When(cp.Parse("", models.Gitlab)).ThenReturn(events.CommentParseResult{CommentResponse: "a comment"}) w := httptest.NewRecorder() e.Post(w, req) - vcsClient.VerifyWasCalledOnce().CreateComment(models.Repo{}, 0, "a comment", "") + vcsClient.VerifyWasCalledOnce().CreateComment(Any[logging.SimpleLogging](), Eq(models.Repo{}), Eq(0), Eq("a comment"), Eq("")) ResponseContains(t, w, http.StatusOK, "Commenting back on pull request") } @@ -337,12 +382,12 @@ func TestPost_GithubCommentResponse(t *testing.T) { When(v.Validate(req, secret)).ThenReturn([]byte(event), nil) baseRepo := models.Repo{} user := models.User{} - When(p.ParseGithubIssueCommentEvent(Any[*github.IssueCommentEvent]())).ThenReturn(baseRepo, user, 1, nil) + When(p.ParseGithubIssueCommentEvent(Any[logging.SimpleLogging](), Any[*github.IssueCommentEvent]())).ThenReturn(baseRepo, user, 1, nil) When(cp.Parse("", models.Github)).ThenReturn(events.CommentParseResult{CommentResponse: "a comment"}) w := httptest.NewRecorder() e.Post(w, req) - vcsClient.VerifyWasCalledOnce().CreateComment(baseRepo, 1, "a comment", "") + vcsClient.VerifyWasCalledOnce().CreateComment(Any[logging.SimpleLogging](), Eq(baseRepo), Eq(1), Eq("a comment"), Eq("")) ResponseContains(t, w, http.StatusOK, "Commenting back on pull request") } @@ -371,7 +416,7 @@ func TestPost_GithubCommentSuccess(t *testing.T) { baseRepo := models.Repo{} user := models.User{} cmd := events.CommentCommand{} - When(p.ParseGithubIssueCommentEvent(Any[*github.IssueCommentEvent]())).ThenReturn(baseRepo, user, 1, nil) + When(p.ParseGithubIssueCommentEvent(Any[logging.SimpleLogging](), Any[*github.IssueCommentEvent]())).ThenReturn(baseRepo, user, 1, nil) When(cp.Parse("", models.Github)).ThenReturn(events.CommentParseResult{Command: &cmd}) w := httptest.NewRecorder() e.Post(w, req) @@ -391,13 +436,13 @@ func TestPost_GithubCommentReaction(t *testing.T) { baseRepo := models.Repo{} user := models.User{} cmd := events.CommentCommand{Name: command.Plan} - When(p.ParseGithubIssueCommentEvent(Any[*github.IssueCommentEvent]())).ThenReturn(baseRepo, user, 1, nil) + When(p.ParseGithubIssueCommentEvent(Any[logging.SimpleLogging](), Any[*github.IssueCommentEvent]())).ThenReturn(baseRepo, user, 1, nil) 
When(cp.Parse(testComment, models.Github)).ThenReturn(events.CommentParseResult{Command: &cmd}) w := httptest.NewRecorder() e.Post(w, req) ResponseContains(t, w, http.StatusOK, "Processing...") - vcsClient.VerifyWasCalledOnce().ReactToComment(baseRepo, 1, 1, "eyes") + vcsClient.VerifyWasCalledOnce().ReactToComment(Any[logging.SimpleLogging](), Eq(baseRepo), Eq(1), Eq(int64(1)), Eq("eyes")) } func TestPost_GitlabCommentReaction(t *testing.T) { @@ -411,7 +456,7 @@ func TestPost_GitlabCommentReaction(t *testing.T) { w := httptest.NewRecorder() e.Post(w, req) ResponseContains(t, w, http.StatusOK, "Processing...") - vcsClient.VerifyWasCalledOnce().ReactToComment(models.Repo{}, 0, 0, "eyes") + vcsClient.VerifyWasCalledOnce().ReactToComment(Any[logging.SimpleLogging](), Eq(models.Repo{}), Eq(0), Eq(int64(0)), Eq("eyes")) } func TestPost_GithubPullRequestInvalid(t *testing.T) { @@ -422,7 +467,7 @@ func TestPost_GithubPullRequestInvalid(t *testing.T) { event := `{"action": "closed"}` When(v.Validate(req, secret)).ThenReturn([]byte(event), nil) - When(p.ParseGithubPullEvent(Any[*github.PullRequestEvent]())).ThenReturn(models.PullRequest{}, models.OpenedPullEvent, models.Repo{}, models.Repo{}, models.User{}, errors.New("err")) + When(p.ParseGithubPullEvent(Any[logging.SimpleLogging](), Any[*github.PullRequestEvent]())).ThenReturn(models.PullRequest{}, models.OpenedPullEvent, models.Repo{}, models.Repo{}, models.User{}, errors.New("err")) w := httptest.NewRecorder() e.Post(w, req) ResponseContains(t, w, http.StatusBadRequest, "Error parsing pull data: err") @@ -742,8 +787,8 @@ func TestPost_GithubPullRequestClosedErrCleaningPull(t *testing.T) { When(v.Validate(req, secret)).ThenReturn([]byte(event), nil) repo := models.Repo{} pull := models.PullRequest{State: models.ClosedPullState} - When(p.ParseGithubPullEvent(Any[*github.PullRequestEvent]())).ThenReturn(pull, models.OpenedPullEvent, repo, repo, models.User{}, nil) - When(c.CleanUpPull(repo, pull)).ThenReturn(errors.New("cleanup err")) + When(p.ParseGithubPullEvent(Any[logging.SimpleLogging](), Any[*github.PullRequestEvent]())).ThenReturn(pull, models.OpenedPullEvent, repo, repo, models.User{}, nil) + When(c.CleanUpPull(Any[logging.SimpleLogging](), Eq(repo), Eq(pull))).ThenReturn(errors.New("cleanup err")) w := httptest.NewRecorder() e.Post(w, req) ResponseContains(t, w, http.StatusInternalServerError, "Error cleaning pull request: cleanup err") @@ -761,7 +806,7 @@ func TestPost_GitlabMergeRequestClosedErrCleaningPull(t *testing.T) { repo := models.Repo{} pullRequest := models.PullRequest{State: models.ClosedPullState} When(p.ParseGitlabMergeRequestEvent(event)).ThenReturn(pullRequest, models.OpenedPullEvent, repo, repo, models.User{}, nil) - When(c.CleanUpPull(repo, pullRequest)).ThenReturn(errors.New("err")) + When(c.CleanUpPull(Any[logging.SimpleLogging](), Eq(repo), Eq(pullRequest))).ThenReturn(errors.New("err")) w := httptest.NewRecorder() e.Post(w, req) ResponseContains(t, w, http.StatusInternalServerError, "Error cleaning pull request: err") @@ -778,8 +823,8 @@ func TestPost_GithubClosedPullRequestSuccess(t *testing.T) { When(v.Validate(req, secret)).ThenReturn([]byte(event), nil) repo := models.Repo{} pull := models.PullRequest{State: models.ClosedPullState} - When(p.ParseGithubPullEvent(Any[*github.PullRequestEvent]())).ThenReturn(pull, models.OpenedPullEvent, repo, repo, models.User{}, nil) - When(c.CleanUpPull(repo, pull)).ThenReturn(nil) + When(p.ParseGithubPullEvent(Any[logging.SimpleLogging](), Any[*github.PullRequestEvent]())).ThenReturn(pull,
models.OpenedPullEvent, repo, repo, models.User{}, nil) + When(c.CleanUpPull(Any[logging.SimpleLogging](), Eq(repo), Eq(pull))).ThenReturn(nil) w := httptest.NewRecorder() e.Post(w, req) ResponseContains(t, w, http.StatusOK, "Pull request cleaned successfully") @@ -867,16 +912,18 @@ func TestPost_BBServerPullClosed(t *testing.T) { Type: models.BitbucketServer, }, } - pullCleaner.VerifyWasCalledOnce().CleanUpPull(expRepo, models.PullRequest{ - Num: 10, - HeadCommit: "2d9fb6b9a46eafb1dcef7b008d1a429d45ca742c", - URL: "https://bbserver.com/projects/PROJ/repos/repository/pull-requests/10", - HeadBranch: "decline-me", - BaseBranch: "main", - Author: "admin", - State: models.OpenPullState, - BaseRepo: expRepo, - }) + pullCleaner.VerifyWasCalledOnce().CleanUpPull( + logger, + expRepo, models.PullRequest{ + Num: 10, + HeadCommit: "2d9fb6b9a46eafb1dcef7b008d1a429d45ca742c", + URL: "https://bbserver.com/projects/PROJ/repos/repository/pull-requests/10", + HeadBranch: "decline-me", + BaseBranch: "main", + Author: "admin", + State: models.OpenPullState, + BaseRepo: expRepo, + }) }) } } @@ -931,7 +978,7 @@ func TestPost_PullOpenedOrUpdated(t *testing.T) { When(v.Validate(req, secret)).ThenReturn([]byte(event), nil) repo = models.Repo{} pullRequest = models.PullRequest{State: models.ClosedPullState} - When(p.ParseGithubPullEvent(Any[*github.PullRequestEvent]())).ThenReturn(pullRequest, models.OpenedPullEvent, repo, repo, models.User{}, nil) + When(p.ParseGithubPullEvent(Any[logging.SimpleLogging](), Any[*github.PullRequestEvent]())).ThenReturn(pullRequest, models.OpenedPullEvent, repo, repo, models.User{}, nil) } w := httptest.NewRecorder() @@ -972,7 +1019,8 @@ func setup(t *testing.T) (events_controllers.VCSEventsController, *mocks.MockGit CommandRunner: cr, PullCleaner: c, GithubWebhookSecret: secret, - SupportedVCSHosts: []models.VCSHostType{models.Github, models.Gitlab, models.AzureDevops}, + SupportedVCSHosts: []models.VCSHostType{models.Github, models.Gitlab, models.AzureDevops, models.Gitea}, + GiteaWebhookSecret: secret, GitlabWebhookSecret: secret, GitlabRequestParserValidator: gl, RepoAllowlistChecker: repoAllowlistChecker, diff --git a/server/controllers/events/github_request_validator.go b/server/controllers/events/github_request_validator.go index 6caf0c4f82..30d9512b12 100644 --- a/server/controllers/events/github_request_validator.go +++ b/server/controllers/events/github_request_validator.go @@ -19,7 +19,7 @@ import ( "io" "net/http" - "github.com/google/go-github/v58/github" + "github.com/google/go-github/v65/github" ) //go:generate pegomock generate --package mocks -o mocks/mock_github_request_validator.go GithubRequestValidator diff --git a/server/controllers/events/testdata/null_provider_lockfile_old_version b/server/controllers/events/testdata/null_provider_lockfile_old_version index 9af6a64b26..c4ebbc4fde 100644 --- a/server/controllers/events/testdata/null_provider_lockfile_old_version +++ b/server/controllers/events/testdata/null_provider_lockfile_old_version @@ -2,20 +2,24 @@ # Manual edits may be lost in future updates.
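# NOTE (reference comment, not emitted by "terraform init"): the "h1:" entries
# hash the unpacked provider package and differ per platform, so a lockfile
# recorded for several platforms carries one "h1:" line per platform, for
# example via "terraform providers lock -platform=linux_amd64 -platform=darwin_arm64".
# The "zh:" entries are the registry's SHA-256 hashes of each release zip archive.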
provider "registry.terraform.io/hashicorp/null" { - version = "3.1.0" - constraints = "3.1.0" + version = "3.2.3" + constraints = "3.2.3" hashes = [ - "h1:grYDj8/Lvp1OwME+g1AsECPN1czO5ssSf+8fCluCHQY=", - "zh:02a1675fd8de126a00460942aaae242e65ca3380b5bb192e8773ef3da9073fd2", - "zh:53e30545ff8926a8e30ad30648991ca8b93b6fa496272cd23b26763c8ee84515", - "zh:5f9200bf708913621d0f6514179d89700e9aa3097c77dac730e8ba6e5901d521", - "zh:9ebf4d9704faba06b3ec7242c773c0fbfe12d62db7d00356d4f55385fc69bfb2", - "zh:a6576c81adc70326e4e1c999c04ad9ca37113a6e925aefab4765e5a5198efa7e", - "zh:a8a42d13346347aff6c63a37cda9b2c6aa5cc384a55b2fe6d6adfa390e609c53", - "zh:c797744d08a5307d50210e0454f91ca4d1c7621c68740441cf4579390452321d", - "zh:cecb6a304046df34c11229f20a80b24b1603960b794d68361a67c5efe58e62b8", - "zh:e1371aa1e502000d9974cfaff5be4cfa02f47b17400005a16f14d2ef30dc2a70", - "zh:fc39cc1fe71234a0b0369d5c5c7f876c71b956d23d7d6f518289737a001ba69b", - "zh:fea4227271ebf7d9e2b61b89ce2328c7262acd9fd190e1fd6d15a591abfa848e", + "h1:+AnORRgFbRO6qqcfaQyeX80W0eX3VmjadjnUFUJTiXo=", + "h1:I0Um8UkrMUb81Fxq/dxbr3HLP2cecTH2WMJiwKSrwQY=", + "h1:nKUqWEza6Lcv3xRlzeiRQrHtqvzX1BhIzjaOVXRYQXQ=", + "h1:obXguGZUWtNAO09f1f9Cb7hsPCOGXuGdN8bn/ohKRBQ=", + "zh:22d062e5278d872fe7aed834f5577ba0a5afe34a3bdac2b81f828d8d3e6706d2", + "zh:23dead00493ad863729495dc212fd6c29b8293e707b055ce5ba21ee453ce552d", + "zh:28299accf21763ca1ca144d8f660688d7c2ad0b105b7202554ca60b02a3856d3", + "zh:55c9e8a9ac25a7652df8c51a8a9a422bd67d784061b1de2dc9fe6c3cb4e77f2f", + "zh:756586535d11698a216291c06b9ed8a5cc6a4ec43eee1ee09ecd5c6a9e297ac1", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:9d5eea62fdb587eeb96a8c4d782459f4e6b73baeece4d04b4a40e44faaee9301", + "zh:a6355f596a3fb8fc85c2fb054ab14e722991533f87f928e7169a486462c74670", + "zh:b5a65a789cff4ada58a5baffc76cb9767dc26ec6b45c00d2ec8b1b027f6db4ed", + "zh:db5ab669cf11d0e9f81dc380a6fdfcac437aea3d69109c7aef1a5426639d2d65", + "zh:de655d251c470197bcbb5ac45d289595295acb8f829f6c781d4a75c8c8b7c7dd", + "zh:f5c68199f2e6076bce92a12230434782bf768103a427e9bb9abee99b116af7b5", ] } diff --git a/server/controllers/events/testdata/test-repos/automerge/dir1/versions.tf b/server/controllers/events/testdata/test-repos/automerge/dir1/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/automerge/dir1/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/automerge/dir2/versions.tf b/server/controllers/events/testdata/test-repos/automerge/dir2/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/automerge/dir2/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/automerge/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/automerge/exp-output-autoplan.txt index c32ed6dfdc..8f32ac5efc 100644 --- a/server/controllers/events/testdata/test-repos/automerge/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/automerge/exp-output-autoplan.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `dir1` workspace: `default` 1. dir: `dir2` workspace: `default` +--- ### 1. 
dir: `dir1` workspace: `default` ```diff @@ -20,10 +21,14 @@ Plan: 1 to add, 0 to change, 0 to destroy. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir1` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir1 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir1` + ```shell + atlantis plan -d dir1 + ``` --- ### 2. dir: `dir2` workspace: `default` @@ -43,17 +48,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir2` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir2 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir2` + ```shell + atlantis plan -d dir2 + ``` --- ### Plan Summary 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/main.tf b/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/main.tf index 2aa6a6437d..231579dd90 100644 --- a/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/main.tf +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/main.tf @@ -1,4 +1,3 @@ resource "random_id" "dummy1" { - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/versions.tf b/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/versions.tf new file mode 100644 index 0000000000..59b68c1d09 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/versions.tf @@ -0,0 +1,9 @@ +provider "random" {} +terraform { + required_providers { + random = { + source = "hashicorp/random" + version = "3.6.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/main.tf b/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/main.tf index 5292f29c85..97f93c35e1 100644 --- a/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/main.tf +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/main.tf @@ -1,4 +1,3 @@ resource "random_id" "dummy2" { - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/versions.tf b/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/versions.tf new file mode 100644 index 0000000000..59b68c1d09 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/versions.tf @@ -0,0 +1,9 @@ +provider "random" {} +terraform { + required_providers { + random = { + source = "hashicorp/random" + version = "3.6.3" + } 
+ } +} diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-autoplan.txt index 7f0f5f45a8..d49fde3e8f 100644 --- a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-autoplan.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `dir1` workspace: `default` 1. dir: `dir2` workspace: `default` +--- ### 1. dir: `dir1` workspace: `default` <details><summary>Show Output</summary> @@ -21,18 +22,21 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir1` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir1 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir1` -</details> + ```shell + atlantis plan -d dir1 + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -54,18 +58,21 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir2` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir2 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir2` -</details> + ```shell + atlantis plan -d dir2 + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -73,7 +80,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-import-dummy1.txt b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-import-dummy1.txt index 04f87516ab..0e6c0c960c 100644 --- a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-import-dummy1.txt +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-import-dummy1.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. 
* :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir1` \ No newline at end of file + ```shell + atlantis plan -d dir1 + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-plan-again.txt index c9a7d87124..f94c8567ed 100644 --- a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-plan-again.txt +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-plan-again.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `dir1` workspace: `default` 1. dir: `dir2` workspace: `default` +--- ### 1. dir: `dir1` workspace: `default` ```diff @@ -14,10 +15,14 @@ and found no differences, so no changes are needed. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir1` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir1 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir1` + ```shell + atlantis plan -d dir1 + ``` --- ### 2. dir: `dir2` workspace: `default` @@ -38,18 +43,21 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir2` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir2 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir2` -</details> + ```shell + atlantis plan -d dir2 + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -57,7 +65,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 1 with changes, 1 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-autoplan.txt index 8fcbeaa757..ddcccae10a 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-autoplan.txt @@ -17,7 +17,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } # random_id.for_each["default"] will be created @@ -28,22 +27,29 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 2 to add, 0 to change, 0 to destroy. 
``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 2 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-count.txt b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-count.txt index d7957913db..32680f595f 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-count.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-count.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` \ No newline at end of file + ```shell + atlantis plan -d . + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-foreach.txt b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-foreach.txt index 45b02dd35f..1e1caabfca 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-foreach.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-foreach.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` \ No newline at end of file + ```shell + atlantis plan -d . + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-plan-again.txt index 46a378158b..379d9e8ce7 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-plan-again.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-plan-again.txt @@ -8,13 +8,21 @@ and found no differences, so no changes are needed. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d . -- -var var=overridden` + ```shell + atlantis plan -d . 
-- -var var=overridden + ``` --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/main.tf b/server/controllers/events/testdata/test-repos/import-single-project-var/main.tf index f7bf7839d0..082a9534c0 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project-var/main.tf +++ b/server/controllers/events/testdata/test-repos/import-single-project-var/main.tf @@ -1,12 +1,10 @@ resource "random_id" "for_each" { for_each = toset([var.var]) - keepers = {} byte_length = 1 } resource "random_id" "count" { count = 1 - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/versions.tf b/server/controllers/events/testdata/test-repos/import-single-project-var/versions.tf new file mode 100644 index 0000000000..59b68c1d09 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/import-single-project-var/versions.tf @@ -0,0 +1,9 @@ +provider "random" {} +terraform { + required_providers { + random = { + source = "hashicorp/random" + version = "3.6.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-autoplan.txt index 5662b98336..9c9fa29474 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-autoplan.txt @@ -17,7 +17,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } # random_id.dummy2 will be created @@ -28,22 +27,29 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 2 to add, 0 to change, 0 to destroy. ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 2 to add, 0 to change, 0 to destroy. 
--- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy1.txt b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy1.txt index 1823a29537..f4a6cb37d9 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy1.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy1.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` \ No newline at end of file + ```shell + atlantis plan -d . + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy2.txt b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy2.txt index d515857ff1..9ab2dbb7e3 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy2.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy2.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` \ No newline at end of file + ```shell + atlantis plan -d . + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-plan-again.txt index adc09b4a37..a7268e38f8 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-plan-again.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-plan-again.txt @@ -8,13 +8,21 @@ and found no differences, so no changes are needed. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` + ```shell + atlantis plan -d . 
+ ``` --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/import-single-project/main.tf b/server/controllers/events/testdata/test-repos/import-single-project/main.tf index 2e60a118f5..0a1884fe5e 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project/main.tf +++ b/server/controllers/events/testdata/test-repos/import-single-project/main.tf @@ -1,9 +1,7 @@ resource "random_id" "dummy1" { - keepers = {} byte_length = 1 } resource "random_id" "dummy2" { - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/import-single-project/versions.tf b/server/controllers/events/testdata/test-repos/import-single-project/versions.tf new file mode 100644 index 0000000000..59b68c1d09 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/import-single-project/versions.tf @@ -0,0 +1,9 @@ +provider "random" {} +terraform { + required_providers { + random = { + source = "hashicorp/random" + version = "3.6.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/import-workspace/dir1/main.tf b/server/controllers/events/testdata/test-repos/import-workspace/dir1/main.tf index de0cb8d4a2..0bc18fe1e3 100644 --- a/server/controllers/events/testdata/test-repos/import-workspace/dir1/main.tf +++ b/server/controllers/events/testdata/test-repos/import-workspace/dir1/main.tf @@ -1,14 +1,12 @@ resource "random_id" "dummy1" { count = terraform.workspace == "ops" ? 1 : 0 - keepers = {} byte_length = 1 } resource "random_id" "dummy2" { count = terraform.workspace == "ops" ? 1 : 0 - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/import-workspace/dir1/versions.tf b/server/controllers/events/testdata/test-repos/import-workspace/dir1/versions.tf new file mode 100644 index 0000000000..59b68c1d09 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/import-workspace/dir1/versions.tf @@ -0,0 +1,9 @@ +provider "random" {} +terraform { + required_providers { + random = { + source = "hashicorp/random" + version = "3.6.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy1.txt b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy1.txt index 99e0e3434f..38f283b20e 100644 --- a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy1.txt +++ b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy1.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. 
* :repeat: To **plan** this project again, comment: - * `atlantis plan -p dir1-ops` + ```shell + atlantis plan -p dir1-ops + ``` diff --git a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy2.txt b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy2.txt index 3f168d91b3..cd4659c0b7 100644 --- a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy2.txt +++ b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy2.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -p dir1-ops` + ```shell + atlantis plan -p dir1-ops + ``` diff --git a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-plan.txt b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-plan.txt index 9859fcbc1f..7edca86268 100644 --- a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-plan.txt +++ b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-plan.txt @@ -8,13 +8,21 @@ and found no differences, so no changes are needed. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p dir1-ops` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -p dir1-ops + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -p dir1-ops` + ```shell + atlantis plan -p dir1-ops + ``` --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-autoplan.txt index 1e55d623b5..43a1815d11 100644 --- a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-autoplan.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `staging` workspace: `default` 1. dir: `production` workspace: `default` +--- ### 1. dir: `staging` workspace: `default` <details><summary>Show Output</summary> @@ -23,13 +24,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + var = "staging" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging` -</details> + ```shell + atlantis plan -d staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -53,13 +58,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. 
Changes to Outputs: + var = "production" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d production` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d production + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d production` -</details> + ```shell + atlantis plan -d production + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -67,7 +76,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-production.txt b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-production.txt index f08e2c50ae..298d515d93 100644 --- a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-production.txt +++ b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-production.txt @@ -20,8 +20,12 @@ Plan: 1 to add, 0 to change, 0 to destroy. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d production` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d production + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d production` + ```shell + atlantis plan -d production + ``` diff --git a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-staging.txt b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-staging.txt index de773736db..9f8399b7f1 100644 --- a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-staging.txt +++ b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-staging.txt @@ -20,8 +20,12 @@ Plan: 1 to add, 0 to change, 0 to destroy. 
``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging` + ```shell + atlantis plan -d staging + ``` diff --git a/server/controllers/events/testdata/test-repos/modules-yaml/modules/null/versions.tf b/server/controllers/events/testdata/test-repos/modules-yaml/modules/null/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/modules-yaml/modules/null/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/modules/exp-output-autoplan-only-staging.txt b/server/controllers/events/testdata/test-repos/modules/exp-output-autoplan-only-staging.txt index d1faf53fc8..c3bdadc019 100644 --- a/server/controllers/events/testdata/test-repos/modules/exp-output-autoplan-only-staging.txt +++ b/server/controllers/events/testdata/test-repos/modules/exp-output-autoplan-only-staging.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + var = "staging" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging` -</details> + ```shell + atlantis plan -d staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/modules/exp-output-plan-production.txt b/server/controllers/events/testdata/test-repos/modules/exp-output-plan-production.txt index 19246ade07..13d2414f3f 100644 --- a/server/controllers/events/testdata/test-repos/modules/exp-output-plan-production.txt +++ b/server/controllers/events/testdata/test-repos/modules/exp-output-plan-production.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + var = "production" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d production` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d production + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d production` -</details> + ```shell + atlantis plan -d production + ``` Plan: 1 to add, 0 to change, 0 to destroy. 
--- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/modules/exp-output-plan-staging.txt b/server/controllers/events/testdata/test-repos/modules/exp-output-plan-staging.txt index d1faf53fc8..c3bdadc019 100644 --- a/server/controllers/events/testdata/test-repos/modules/exp-output-plan-staging.txt +++ b/server/controllers/events/testdata/test-repos/modules/exp-output-plan-staging.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + var = "staging" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging` -</details> + ```shell + atlantis plan -d staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/modules/modules/null/versions.tf b/server/controllers/events/testdata/test-repos/modules/modules/null/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/modules/modules/null/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - <redacted plan file> - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . 
+ ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-approve-policies-clear.txt b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-approve-policies-clear.txt index e6643f8ce3..107a689278 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-approve-policies-clear.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-approve-policies-clear.txt @@ -1,6 +1,7 @@ Ran Approve Policies for 1 projects: 1. dir: `.` workspace: `default` +--- ### 1. dir: `.` workspace: `default` **Approve Policies Failed**: One or more policy sets require additional approval. @@ -9,15 +10,25 @@ Ran Approve Policies for 1 projects: policy set: test_policy: requires: 1 approval(s), have: 0. 
``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - <redacted plan file> - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. 
Changes to Outputs: + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-auto-policy-check.txt index f366769233..c8b5da50dd 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-auto-policy-check.txt @@ -25,15 +25,24 @@ post-conftest output policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . 
+ ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-approve-policies.txt b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-approve-policies.txt index d6e39f260d..b842f99682 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-approve-policies.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-approve-policies.txt @@ -1,6 +1,7 @@ Ran Approve Policies for 1 projects: 1. dir: `.` workspace: `default` +--- ### 1. dir: `.` workspace: `default` **Approve Policies Error** @@ -15,15 +16,25 @@ Ran Approve Policies for 1 projects: policy set: test_policy: requires: 1 approval(s), have: 0. 
``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - <redacted plan file> - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. 
Changes to Outputs: + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - <redacted plan file> - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . 
+ ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - <redacted plan file> - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. 
``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. 
--- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - <redacted plan file> - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. 
Changes to Outputs: + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - <redacted plan file> - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . 
+ ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - <redacted plan file> - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. 
``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. 
--- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-auto-policy-check.txt index c7f45c85f5..669b9cb064 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - <redacted plan file> - null_resource_policy - WARNING: Null Resource crea policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . 
+ ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-extra-args/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-extra-args/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-extra-args/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/dir1/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/dir1/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/dir1/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/dir2/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/dir2/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/dir2/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-apply.txt b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-apply.txt index eb6bda8987..7e0bd72a67 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-apply.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-apply.txt @@ -2,6 +2,7 @@ Ran Apply for 2 projects: 1. dir: `dir1` workspace: `default` 1. dir: `dir2` workspace: `default` +--- ### 1. dir: `dir1` workspace: `default` ```diff diff --git a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt index c292c651f3..944cd1ba56 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt @@ -2,6 +2,7 @@ Ran Policy Check for 2 projects: 1. dir: `dir1` workspace: `default` 1. dir: `dir2` workspace: `default` +--- ### 1. 
dir: `dir1` workspace: `default` #### Policy Set: `test_policy` @@ -13,10 +14,14 @@ Ran Policy Check for 2 projects: * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir1` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir1 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d dir1` + ```shell + atlantis plan -d dir1 + ``` --- ### 2. dir: `dir2` workspace: `default` @@ -35,15 +40,25 @@ FAIL - <redacted plan file> - main - WARNING: Forbidden Resource creation is pro policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d dir2` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d dir2 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d dir2` + ```shell + atlantis plan -d dir2 + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-autoplan.txt index 098c4eba93..e01442f671 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-autoplan.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `dir1` workspace: `default` 1. dir: `dir2` workspace: `default` +--- ### 1. dir: `dir1` workspace: `default` <details><summary>Show Output</summary> @@ -23,13 +24,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir1` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir1 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir1` -</details> + ```shell + atlantis plan -d dir1 + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -53,13 +58,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. 
Changes to Outputs: + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir2` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir2 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir2` -</details> + ```shell + atlantis plan -d dir2 + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -67,7 +76,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-success-silent/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-success-silent/exp-output-autoplan.txt index ea7d4bf3ec..0fe7b1646b 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-success-silent/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-success-silent/exp-output-autoplan.txt @@ -9,13 +9,21 @@ state, without changing any real infrastructure. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` + ```shell + atlantis plan -d . + ``` --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - <redacted plan file> - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . 
+ ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-apply.txt b/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-apply.txt index a136ff9691..4b78a636d5 100644 --- a/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-apply.txt +++ b/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-apply.txt @@ -2,6 +2,7 @@ Ran Apply for 2 projects: 1. dir: `infrastructure/production` workspace: `default` 1. dir: `infrastructure/staging` workspace: `default` +--- ### 1. 
dir: `infrastructure/production` workspace: `default` ```diff diff --git a/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-autoplan.txt index 29f5f76dae..8bf40fc657 100644 --- a/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-autoplan.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `infrastructure/staging` workspace: `default` 1. dir: `infrastructure/production` workspace: `default` +--- ### 1. dir: `infrastructure/staging` workspace: `default` ```diff @@ -20,10 +21,14 @@ Plan: 1 to add, 0 to change, 0 to destroy. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d infrastructure/staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d infrastructure/staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d infrastructure/staging` + ```shell + atlantis plan -d infrastructure/staging + ``` --- ### 2. dir: `infrastructure/production` workspace: `default` @@ -43,17 +48,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d infrastructure/production` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d infrastructure/production + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d infrastructure/production` + ```shell + atlantis plan -d infrastructure/production + ``` --- ### Plan Summary 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/repo-config-file/infrastructure/production/versions.tf b/server/controllers/events/testdata/test-repos/repo-config-file/infrastructure/production/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/repo-config-file/infrastructure/production/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/repo-config-file/infrastructure/staging/versions.tf b/server/controllers/events/testdata/test-repos/repo-config-file/infrastructure/staging/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/repo-config-file/infrastructure/staging/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git 
a/server/controllers/events/testdata/test-repos/server-side-cfg/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/server-side-cfg/exp-output-autoplan.txt index ad9591b8ae..37e78c18af 100644 --- a/server/controllers/events/testdata/test-repos/server-side-cfg/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/server-side-cfg/exp-output-autoplan.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `.` workspace: `default` 1. dir: `.` workspace: `staging` +--- ### 1. dir: `.` workspace: `default` <details><summary>Show Output</summary> @@ -28,13 +29,17 @@ Changes to Outputs: postplan custom ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -61,13 +66,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "staging" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -w staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -w staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -w staging` -</details> + ```shell + atlantis plan -w staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -75,7 +84,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/server-side-cfg/versions.tf b/server/controllers/events/testdata/test-repos/server-side-cfg/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/server-side-cfg/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-autoplan.txt index 6e70ac89fb..822531032c 100644 --- a/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-autoplan.txt @@ -30,17 +30,25 @@ Changes to Outputs: + var = "default" + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . 
+ ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 3 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-plan.txt b/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-plan.txt index 6e70ac89fb..822531032c 100644 --- a/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-plan.txt +++ b/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-plan.txt @@ -30,17 +30,25 @@ Changes to Outputs: + var = "default" + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 3 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/simple-with-lockfile/versions.tf b/server/controllers/events/testdata/test-repos/simple-with-lockfile/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/simple-with-lockfile/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-apply-all.txt b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-apply-all.txt index 61eac2271a..4e757a396c 100644 --- a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-apply-all.txt +++ b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-apply-all.txt @@ -2,6 +2,7 @@ Ran Apply for 2 projects: 1. dir: `.` workspace: `default` 1. dir: `.` workspace: `staging` +--- ### 1. 
dir: `.` workspace: `default` ```diff diff --git a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-autoplan.txt index dcbb45bf78..c445925f6c 100644 --- a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-autoplan.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `.` workspace: `default` 1. dir: `.` workspace: `staging` +--- ### 1. dir: `.` workspace: `default` <details><summary>Show Output</summary> @@ -29,13 +30,17 @@ Changes to Outputs: postplan ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -60,13 +65,17 @@ Changes to Outputs: + var = "fromfile" + workspace = "staging" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -w staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -w staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -w staging` -</details> + ```shell + atlantis plan -w staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -74,7 +83,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-default.txt b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-default.txt index f0419c9189..b944f4deab 100644 --- a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-default.txt +++ b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-default.txt @@ -25,17 +25,25 @@ Changes to Outputs: postplan ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. 
--- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-staging.txt b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-staging.txt index 7e34016bab..64880424f6 100644 --- a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-staging.txt +++ b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-staging.txt @@ -20,17 +20,25 @@ Changes to Outputs: + var = "fromfile" + workspace = "staging" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -w staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -w staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -w staging` -</details> + ```shell + atlantis plan -w staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/simple-yaml/versions.tf b/server/controllers/events/testdata/test-repos/simple-yaml/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/simple-yaml/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-apply-var-all.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-apply-var-all.txt index 2977099b55..cb7dd9a752 100644 --- a/server/controllers/events/testdata/test-repos/simple/exp-output-apply-var-all.txt +++ b/server/controllers/events/testdata/test-repos/simple/exp-output-apply-var-all.txt @@ -2,6 +2,7 @@ Ran Apply for 2 projects: 1. dir: `.` workspace: `default` 1. dir: `.` workspace: `new_workspace` +--- ### 1. 
dir: `.` workspace: `default` <details><summary>Show Output</summary> diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt index 242515e415..13bdae3fac 100644 --- a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt +++ b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt @@ -30,17 +30,25 @@ Changes to Outputs: + var = "new_workspace" + workspace = "new_workspace" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -w new_workspace` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -w new_workspace + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -w new_workspace -- -var var=new_workspace` -</details> + ```shell + atlantis plan -w new_workspace -- -var var=new_workspace + ``` Plan: 3 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-var-overridden.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-var-overridden.txt index 5a86cff0c9..ab28d0ca84 100644 --- a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-var-overridden.txt +++ b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-var-overridden.txt @@ -30,17 +30,25 @@ Changes to Outputs: + var = "overridden" + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d . -- -var var=overridden` -</details> + ```shell + atlantis plan -d . -- -var var=overridden + ``` Plan: 3 to add, 0 to change, 0 to destroy. 
--- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan.txt index 54e5505476..191b540b63 100644 --- a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan.txt +++ b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan.txt @@ -30,17 +30,25 @@ Changes to Outputs: + var = "default_workspace" + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d . -- -var var=default_workspace` -</details> + ```shell + atlantis plan -d . -- -var var=default_workspace + ``` Plan: 3 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-auto-policy-check.txt index c6e0823922..fd03e48aed 100644 --- a/server/controllers/events/testdata/test-repos/simple/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/simple/exp-output-auto-policy-check.txt @@ -5,13 +5,21 @@ Ran Policy Check for dir: `.` workspace: `default` ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` + ```shell + atlantis plan -d . 
+ ``` --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-autoplan.txt index 6e70ac89fb..822531032c 100644 --- a/server/controllers/events/testdata/test-repos/simple/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/simple/exp-output-autoplan.txt @@ -30,17 +30,25 @@ Changes to Outputs: + var = "default" + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 3 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/simple/versions.tf b/server/controllers/events/testdata/test-repos/simple/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/simple/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/main.tf b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/main.tf index 1af2266d40..0c4b79e3f8 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/main.tf +++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/main.tf @@ -1,4 +1,3 @@ resource "random_id" "dummy" { - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/versions.tf b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/versions.tf new file mode 100644 index 0000000000..59b68c1d09 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/versions.tf @@ -0,0 +1,9 @@ +provider "random" {} +terraform { + required_providers { + random = { + source = "hashicorp/random" + version = "3.6.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/main.tf b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/main.tf index 1af2266d40..0c4b79e3f8 100644 --- 
a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/main.tf +++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/main.tf @@ -1,4 +1,3 @@ resource "random_id" "dummy" { - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/versions.tf b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/versions.tf new file mode 100644 index 0000000000..59b68c1d09 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/versions.tf @@ -0,0 +1,9 @@ +provider "random" {} +terraform { + required_providers { + random = { + source = "hashicorp/random" + version = "3.6.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-autoplan.txt index 49c4dc2673..1de0174378 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-autoplan.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `dir1` workspace: `default` 1. dir: `dir2` workspace: `default` +--- ### 1. dir: `dir1` workspace: `default` <details><summary>Show Output</summary> @@ -21,18 +22,21 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir1` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir1 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir1` -</details> + ```shell + atlantis plan -d dir1 + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -54,18 +58,21 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir2` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir2 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir2` -</details> + ```shell + atlantis plan -d dir2 + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -73,7 +80,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 
2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy1.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy1.txt index 45b6c1ed55..8d98fee1d7 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy1.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy1.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir1` \ No newline at end of file + ```shell + atlantis plan -d dir1 + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy2.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy2.txt index 7a28ec5e85..e6bef5251a 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy2.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy2.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir2` \ No newline at end of file + ```shell + atlantis plan -d dir2 + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan-again.txt index 49c4dc2673..1de0174378 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan-again.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan-again.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `dir1` workspace: `default` 1. dir: `dir2` workspace: `default` +--- ### 1. dir: `dir1` workspace: `default` <details><summary>Show Output</summary> @@ -21,18 +22,21 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir1` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir1 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir1` -</details> + ```shell + atlantis plan -d dir1 + ``` Plan: 1 to add, 0 to change, 0 to destroy. 
--- @@ -54,18 +58,21 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir2` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir2 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir2` -</details> + ```shell + atlantis plan -d dir2 + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -73,7 +80,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan.txt index fb3cfdbbd7..d74495004a 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `dir1` workspace: `default` 1. dir: `dir2` workspace: `default` +--- ### 1. dir: `dir1` workspace: `default` ```diff @@ -14,10 +15,14 @@ and found no differences, so no changes are needed. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir1` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir1 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir1` + ```shell + atlantis plan -d dir1 + ``` --- ### 2. dir: `dir2` workspace: `default` @@ -31,17 +36,25 @@ and found no differences, so no changes are needed. 
``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir2` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir2 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir2` + ```shell + atlantis plan -d dir2 + ``` --- ### Plan Summary 2 projects, 0 with changes, 2 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-state-rm-multiple-projects.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-state-rm-multiple-projects.txt index 3c8e0eb0bb..973455d73c 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-state-rm-multiple-projects.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-state-rm-multiple-projects.txt @@ -2,6 +2,7 @@ Ran State for 2 projects: 1. dir: `dir1` workspace: `default` 1. dir: `dir2` workspace: `default` +--- ### 1. dir: `dir1` workspace: `default` ```diff @@ -12,7 +13,9 @@ Successfully removed 1 resource instance(s). :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir1` + ```shell + atlantis plan -d dir1 + ``` --- ### 2. dir: `dir2` workspace: `default` @@ -24,6 +27,8 @@ Successfully removed 1 resource instance(s). :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir2` + ```shell + atlantis plan -d dir2 + ``` --- \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-autoplan.txt index 077f989d9a..3728b1b223 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-autoplan.txt @@ -17,7 +17,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } # random_id.for_each["default"] will be created @@ -28,7 +27,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } # random_id.simple will be created @@ -39,22 +37,29 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 3 to add, 0 to change, 0 to destroy. 
``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -</details> + ```shell + atlantis plan -d . + ``` Plan: 3 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-count.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-count.txt index d7957913db..32680f595f 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-count.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-count.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` \ No newline at end of file + ```shell + atlantis plan -d . + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-foreach.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-foreach.txt index 284c8e2457..982e937496 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-foreach.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-foreach.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` \ No newline at end of file + ```shell + atlantis plan -d . + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-simple.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-simple.txt index 1f17baa2d7..be74444839 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-simple.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-simple.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` \ No newline at end of file + ```shell + atlantis plan -d . 
+ ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan-again.txt index edb4c17579..288ee1df89 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan-again.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan-again.txt @@ -17,7 +17,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } # random_id.for_each["overridden"] will be created @@ -28,7 +27,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } # random_id.simple will be created @@ -39,22 +37,29 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 3 to add, 0 to change, 0 to destroy. ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d . -- -var var=overridden` -</details> + ```shell + atlantis plan -d . -- -var var=overridden + ``` Plan: 3 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan.txt index 46a378158b..379d9e8ce7 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan.txt @@ -8,13 +8,21 @@ and found no differences, so no changes are needed. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d . -- -var var=overridden` + ```shell + atlantis plan -d . 
-- -var var=overridden + ``` --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-foreach.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-foreach.txt index 264b5f2881..a6f0f97cce 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-foreach.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-foreach.txt @@ -8,4 +8,6 @@ Successfully removed 1 resource instance(s). :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` \ No newline at end of file + ```shell + atlantis plan -d . + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-multiple.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-multiple.txt index a0d1b54717..0848fc65e8 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-multiple.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-multiple.txt @@ -9,4 +9,6 @@ Successfully removed 2 resource instance(s). :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` \ No newline at end of file + ```shell + atlantis plan -d . 
+ ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/main.tf b/server/controllers/events/testdata/test-repos/state-rm-single-project/main.tf index d434ac8645..05e52a00b2 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-single-project/main.tf +++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/main.tf @@ -1,17 +1,14 @@ resource "random_id" "simple" { - keepers = {} byte_length = 1 } resource "random_id" "for_each" { for_each = toset([var.var]) - keepers = {} byte_length = 1 } resource "random_id" "count" { count = 1 - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/versions.tf b/server/controllers/events/testdata/test-repos/state-rm-single-project/versions.tf new file mode 100644 index 0000000000..59b68c1d09 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/versions.tf @@ -0,0 +1,9 @@ +provider "random" {} +terraform { + required_providers { + random = { + source = "hashicorp/random" + version = "3.6.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/main.tf b/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/main.tf index 353cb66e31..3056320d04 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/main.tf +++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/main.tf @@ -1,7 +1,6 @@ resource "random_id" "dummy1" { count = terraform.workspace == "ops" ? 1 : 0 - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/versions.tf b/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/versions.tf new file mode 100644 index 0000000000..59b68c1d09 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/versions.tf @@ -0,0 +1,9 @@ +provider "random" {} +terraform { + required_providers { + random = { + source = "hashicorp/random" + version = "3.6.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-import-dummy1.txt b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-import-dummy1.txt index a6a1dbbfaa..b81ff32704 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-import-dummy1.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-import-dummy1.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. 
* :repeat: To **plan** this project again, comment: - * `atlantis plan -p dir1-ops` \ No newline at end of file + ```shell + atlantis plan -p dir1-ops + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan-again.txt index b24ee90b20..632b3cf24c 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan-again.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan-again.txt @@ -17,22 +17,29 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p dir1-ops` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -p dir1-ops + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -p dir1-ops` -</details> + ```shell + atlantis plan -p dir1-ops + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan.txt b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan.txt index 4c73caa512..3beeb14cab 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan.txt @@ -10,13 +10,21 @@ and found no differences, so no changes are needed. 
``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p dir1-ops` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -p dir1-ops + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -p dir1-ops` + ```shell + atlantis plan -p dir1-ops + ``` --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-state-rm-dummy1.txt b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-state-rm-dummy1.txt index 5aa99db217..8c63577a49 100644 --- a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-state-rm-dummy1.txt +++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-state-rm-dummy1.txt @@ -8,4 +8,6 @@ Successfully removed 1 resource instance(s). :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -p dir1-ops` \ No newline at end of file + ```shell + atlantis plan -p dir1-ops + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt b/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt index 20be38a244..cf3378bc59 100644 --- a/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt +++ b/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt @@ -20,17 +20,25 @@ Changes to Outputs: + var = "default" + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p default` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -p default + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -p default` -</details> + ```shell + atlantis plan -p default + ``` Plan: 1 to add, 0 to change, 0 to destroy. 
--- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt b/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt index e34c9bc2dd..efad85de0e 100644 --- a/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt +++ b/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt @@ -20,17 +20,25 @@ Changes to Outputs: + var = "staging" + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -p staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -p staging` -</details> + ```shell + atlantis plan -p staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/versions.tf b/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/tfvars-yaml/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/tfvars-yaml/exp-output-autoplan.txt index 82ce193d9f..75c4320f96 100644 --- a/server/controllers/events/testdata/test-repos/tfvars-yaml/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/tfvars-yaml/exp-output-autoplan.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. project: `default` dir: `.` workspace: `default` 1. project: `staging` dir: `.` workspace: `default` +--- ### 1. 
project: `default` dir: `.` workspace: `default` <details><summary>Show Output</summary> @@ -26,13 +27,17 @@ Changes to Outputs: workspace=default ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p default` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -p default + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -p default` -</details> + ```shell + atlantis plan -p default + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -57,13 +62,17 @@ Changes to Outputs: + var = "staging" + workspace = "default" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -p staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -p staging` -</details> + ```shell + atlantis plan -p staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -71,7 +80,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/tfvars-yaml/versions.tf b/server/controllers/events/testdata/test-repos/tfvars-yaml/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/tfvars-yaml/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-production.txt b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-production.txt index cd4e8e0b95..986241f599 100644 --- a/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-production.txt +++ b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-production.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `production` workspace: `production` 1. dir: `staging` workspace: `staging` +--- ### 1. dir: `production` workspace: `production` <details><summary>Show Output</summary> @@ -23,13 +24,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. 
Changes to Outputs: + workspace = "production" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d production -w production` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d production -w production + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d production -w production` -</details> + ```shell + atlantis plan -d production -w production + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -53,13 +58,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "staging" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging -w staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d staging -w staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging -w staging` -</details> + ```shell + atlantis plan -d staging -w staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -67,7 +76,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-staging.txt b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-staging.txt index cd4e8e0b95..986241f599 100644 --- a/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-staging.txt +++ b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-staging.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `production` workspace: `production` 1. dir: `staging` workspace: `staging` +--- ### 1. dir: `production` workspace: `production` <details><summary>Show Output</summary> @@ -23,13 +24,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "production" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d production -w production` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d production -w production + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d production -w production` -</details> + ```shell + atlantis plan -d production -w production + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -53,13 +58,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. 
Changes to Outputs: + workspace = "staging" ``` +</details> * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging -w staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d staging -w staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging -w staging` -</details> + ```shell + atlantis plan -d staging -w staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -67,7 +76,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/production/versions.tf b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/production/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/production/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/staging/versions.tf b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/staging/versions.tf new file mode 100644 index 0000000000..ccc71862f2 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/staging/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "3.2.3" + } + } +} diff --git a/server/controllers/github_app_controller.go b/server/controllers/github_app_controller.go index 670f13351c..f6c72dc70b 100644 --- a/server/controllers/github_app_controller.go +++ b/server/controllers/github_app_controller.go @@ -6,7 +6,7 @@ import ( "net/http" "net/url" - "github.com/runatlantis/atlantis/server/controllers/templates" + "github.com/runatlantis/atlantis/server/controllers/web_templates" "github.com/runatlantis/atlantis/server/events/vcs" "github.com/runatlantis/atlantis/server/logging" ) @@ -56,13 +56,14 @@ func (g *GithubAppController) ExchangeCode(w http.ResponseWriter, r *http.Reques g.Logger.Debug("Exchanging GitHub app code for app credentials") creds := &vcs.GithubAnonymousCredentials{} config := vcs.GithubConfig{} - client, err := vcs.NewGithubClient(g.GithubHostname, creds, config, g.Logger) + // This client does not post comments, so we don't need to configure it with maxCommentsPerCommand. 
+ client, err := vcs.NewGithubClient(g.GithubHostname, creds, config, 0, g.Logger) if err != nil { g.respond(w, logging.Error, http.StatusInternalServerError, "Failed to exchange code for github app: %s", err) return } - app, err := client.ExchangeCode(code) + app, err := client.ExchangeCode(g.Logger, code) if err != nil { g.respond(w, logging.Error, http.StatusInternalServerError, "Failed to exchange code for github app: %s", err) return @@ -70,7 +71,7 @@ func (g *GithubAppController) ExchangeCode(w http.ResponseWriter, r *http.Reques g.Logger.Debug("Found credentials for GitHub app %q with id %d", app.Name, app.ID) - err = templates.GithubAppSetupTemplate.Execute(w, templates.GithubSetupData{ + err = web_templates.GithubAppSetupTemplate.Execute(w, web_templates.GithubSetupData{ Target: "", Manifest: "", ID: app.ID, @@ -122,6 +123,7 @@ func (g *GithubAppController) New(w http.ResponseWriter, _ *http.Request) { "statuses": "write", "administration": "read", "members": "read", + "actions": "read", }, } @@ -142,7 +144,7 @@ func (g *GithubAppController) New(w http.ResponseWriter, _ *http.Request) { return } - err = templates.GithubAppSetupTemplate.Execute(w, templates.GithubSetupData{ + err = web_templates.GithubAppSetupTemplate.Execute(w, web_templates.GithubSetupData{ Target: url.String(), Manifest: string(jsonManifest), }) diff --git a/server/controllers/jobs_controller.go b/server/controllers/jobs_controller.go index 0363977944..2f27e32b97 100644 --- a/server/controllers/jobs_controller.go +++ b/server/controllers/jobs_controller.go @@ -6,7 +6,7 @@ import ( "net/url" "github.com/gorilla/mux" - "github.com/runatlantis/atlantis/server/controllers/templates" + "github.com/runatlantis/atlantis/server/controllers/web_templates" "github.com/runatlantis/atlantis/server/controllers/websocket" "github.com/runatlantis/atlantis/server/core/locking" "github.com/runatlantis/atlantis/server/logging" @@ -29,8 +29,8 @@ type JobsController struct { AtlantisVersion string AtlantisURL *url.URL Logger logging.SimpleLogging - ProjectJobsTemplate templates.TemplateWriter - ProjectJobsErrorTemplate templates.TemplateWriter + ProjectJobsTemplate web_templates.TemplateWriter + ProjectJobsErrorTemplate web_templates.TemplateWriter Backend locking.Backend WsMux *websocket.Multiplexor KeyGenerator JobIDKeyGenerator @@ -41,11 +41,11 @@ func (j *JobsController) getProjectJobs(w http.ResponseWriter, r *http.Request) jobID, err := j.KeyGenerator.Generate(r) if err != nil { - j.respond(w, logging.Error, http.StatusBadRequest, err.Error()) + j.respond(w, logging.Error, http.StatusBadRequest, "%s", err.Error()) return err } - viewData := templates.ProjectJobData{ + viewData := web_templates.ProjectJobData{ AtlantisVersion: j.AtlantisVersion, ProjectPath: jobID, CleanedBasePath: j.AtlantisURL.Path, @@ -67,7 +67,7 @@ func (j *JobsController) getProjectJobsWS(w http.ResponseWriter, r *http.Request err := j.WsMux.Handle(w, r) if err != nil { - j.respond(w, logging.Error, http.StatusInternalServerError, err.Error()) + j.respond(w, logging.Error, http.StatusInternalServerError, "%s", err.Error()) return err } diff --git a/server/controllers/locks_controller.go b/server/controllers/locks_controller.go index 85a4420430..27b330c8b3 100644 --- a/server/controllers/locks_controller.go +++ b/server/controllers/locks_controller.go @@ -5,7 +5,7 @@ import ( "net/http" "net/url" - "github.com/runatlantis/atlantis/server/controllers/templates" + "github.com/runatlantis/atlantis/server/controllers/web_templates" "github.com/gorilla/mux" 
"github.com/runatlantis/atlantis/server/core/locking" @@ -23,7 +23,7 @@ type LocksController struct { Logger logging.SimpleLogging ApplyLocker locking.ApplyLocker VCSClient vcs.Client - LockDetailTemplate templates.TemplateWriter + LockDetailTemplate web_templates.TemplateWriter WorkingDir events.WorkingDir WorkingDirLocker events.WorkingDirLocker Backend locking.Backend @@ -73,12 +73,12 @@ func (l *LocksController) GetLock(w http.ResponseWriter, r *http.Request) { return } if lock == nil { - l.respond(w, logging.Info, http.StatusNotFound, "No lock found at id %q", idUnencoded) + l.respond(w, logging.Info, http.StatusNotFound, "No lock found at id '%s'", idUnencoded) return } owner, repo := models.SplitRepoFullName(lock.Project.RepoFullName) - viewData := templates.LockDetailData{ + viewData := web_templates.LockDetailData{ LockKeyEncoded: id, LockKey: idUnencoded, PullRequestLink: lock.Pull.URL, @@ -107,18 +107,18 @@ func (l *LocksController) DeleteLock(w http.ResponseWriter, r *http.Request) { idUnencoded, err := url.PathUnescape(id) if err != nil { - l.respond(w, logging.Warn, http.StatusBadRequest, "Invalid lock id %q. Failed with error: %s", id, err) + l.respond(w, logging.Warn, http.StatusBadRequest, "Invalid lock id '%s'. Failed with error: '%s'", id, err) return } - lock, err := l.DeleteLockCommand.DeleteLock(idUnencoded) + lock, err := l.DeleteLockCommand.DeleteLock(l.Logger, idUnencoded) if err != nil { - l.respond(w, logging.Error, http.StatusInternalServerError, "deleting lock failed with: %s", err) + l.respond(w, logging.Error, http.StatusInternalServerError, "deleting lock failed with: '%s'", err) return } if lock == nil { - l.respond(w, logging.Info, http.StatusNotFound, "No lock found at id %q", idUnencoded) + l.respond(w, logging.Info, http.StatusNotFound, "No lock found at id '%s'", idUnencoded) return } @@ -133,13 +133,13 @@ func (l *LocksController) DeleteLock(w http.ResponseWriter, r *http.Request) { // Once the lock has been deleted, comment back on the pull request. comment := fmt.Sprintf("**Warning**: The plan for dir: `%s` workspace: `%s` was **discarded** via the Atlantis UI.\n\n"+ "To `apply` this plan you must run `plan` again.", lock.Project.Path, lock.Workspace) - if err = l.VCSClient.CreateComment(lock.Pull.BaseRepo, lock.Pull.Num, comment, ""); err != nil { + if err = l.VCSClient.CreateComment(l.Logger, lock.Pull.BaseRepo, lock.Pull.Num, comment, ""); err != nil { l.Logger.Warn("failed commenting on pull request: %s", err) } } else { l.Logger.Debug("skipping commenting on pull request and deleting workspace because BaseRepo field is empty") } - l.respond(w, logging.Info, http.StatusOK, "Deleted lock id %q", id) + l.respond(w, logging.Info, http.StatusOK, "Deleted lock id '%s'", id) } // respond is a helper function to respond and log the response. 
lvl is the log diff --git a/server/controllers/locks_controller_test.go b/server/controllers/locks_controller_test.go index 88e538d15d..d878b34e33 100644 --- a/server/controllers/locks_controller_test.go +++ b/server/controllers/locks_controller_test.go @@ -11,8 +11,8 @@ import ( "time" "github.com/runatlantis/atlantis/server/controllers" - "github.com/runatlantis/atlantis/server/controllers/templates" - tMocks "github.com/runatlantis/atlantis/server/controllers/templates/mocks" + "github.com/runatlantis/atlantis/server/controllers/web_templates" + tMocks "github.com/runatlantis/atlantis/server/controllers/web_templates/mocks" "github.com/runatlantis/atlantis/server/core/db" "github.com/runatlantis/atlantis/server/core/locking" @@ -159,7 +159,7 @@ func TestGetLock_None(t *testing.T) { req = mux.SetURLVars(req, map[string]string{"id": "id"}) w := httptest.NewRecorder() lc.GetLock(w, req) - ResponseContains(t, w, http.StatusNotFound, "No lock found at id \"id\"") + ResponseContains(t, w, http.StatusNotFound, "No lock found at id 'id'") } func TestGetLock_Success(t *testing.T) { @@ -185,7 +185,7 @@ func TestGetLock_Success(t *testing.T) { req = mux.SetURLVars(req, map[string]string{"id": "id"}) w := httptest.NewRecorder() lc.GetLock(w, req) - tmpl.VerifyWasCalledOnce().Execute(w, templates.LockDetailData{ + tmpl.VerifyWasCalledOnce().Execute(w, web_templates.LockDetailData{ LockKeyEncoded: "id", LockKey: "id", RepoOwner: "owner", @@ -215,14 +215,14 @@ func TestDeleteLock_InvalidLockID(t *testing.T) { req = mux.SetURLVars(req, map[string]string{"id": "%A@"}) w := httptest.NewRecorder() lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusBadRequest, "Invalid lock id \"%A@\"") + ResponseContains(t, w, http.StatusBadRequest, "Invalid lock id '%A@'") } func TestDeleteLock_LockerErr(t *testing.T) { t.Log("If there is an error retrieving the lock, a 500 is returned") RegisterMockTestingT(t) dlc := mocks2.NewMockDeleteLockCommand() - When(dlc.DeleteLock("id")).ThenReturn(nil, errors.New("err")) + When(dlc.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(nil, errors.New("err")) lc := controllers.LocksController{ DeleteLockCommand: dlc, Logger: logging.NewNoopLogger(t), @@ -238,7 +238,7 @@ func TestDeleteLock_None(t *testing.T) { t.Log("If there is no lock at that ID we get a 404") RegisterMockTestingT(t) dlc := mocks2.NewMockDeleteLockCommand() - When(dlc.DeleteLock("id")).ThenReturn(nil, nil) + When(dlc.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(nil, nil) lc := controllers.LocksController{ DeleteLockCommand: dlc, Logger: logging.NewNoopLogger(t), @@ -247,7 +247,7 @@ func TestDeleteLock_None(t *testing.T) { req = mux.SetURLVars(req, map[string]string{"id": "id"}) w := httptest.NewRecorder() lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusNotFound, "No lock found at id \"id\"") + ResponseContains(t, w, http.StatusNotFound, "No lock found at id 'id'") } func TestDeleteLock_OldFormat(t *testing.T) { @@ -255,7 +255,7 @@ func TestDeleteLock_OldFormat(t *testing.T) { RegisterMockTestingT(t) cp := vcsmocks.NewMockClient() dlc := mocks2.NewMockDeleteLockCommand() - When(dlc.DeleteLock("id")).ThenReturn(&models.ProjectLock{}, nil) + When(dlc.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(&models.ProjectLock{}, nil) lc := controllers.LocksController{ DeleteLockCommand: dlc, Logger: logging.NewNoopLogger(t), @@ -265,8 +265,8 @@ func TestDeleteLock_OldFormat(t *testing.T) { req = mux.SetURLVars(req, map[string]string{"id": "id"}) w := 
httptest.NewRecorder() lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusOK, "Deleted lock id \"id\"") - cp.VerifyWasCalled(Never()).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()) + ResponseContains(t, w, http.StatusOK, "Deleted lock id 'id'") + cp.VerifyWasCalled(Never()).CreateComment(Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) } func TestDeleteLock_UpdateProjectStatus(t *testing.T) { @@ -284,7 +284,7 @@ func TestDeleteLock_UpdateProjectStatus(t *testing.T) { pull := models.PullRequest{ BaseRepo: models.Repo{FullName: repoName}, } - When(l.DeleteLock("id")).ThenReturn(&models.ProjectLock{ + When(l.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(&models.ProjectLock{ Pull: pull, Workspace: workspaceName, Project: models.Project{ @@ -321,7 +321,7 @@ func TestDeleteLock_UpdateProjectStatus(t *testing.T) { req = mux.SetURLVars(req, map[string]string{"id": "id"}) w := httptest.NewRecorder() lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusOK, "Deleted lock id \"id\"") + ResponseContains(t, w, http.StatusOK, "Deleted lock id 'id'") status, err := backend.GetPullStatus(pull) Ok(t, err) Assert(t, status.Projects != nil, "status projects was nil") @@ -338,7 +338,7 @@ func TestDeleteLock_CommentFailed(t *testing.T) { t.Log("If the commenting fails we still return success") RegisterMockTestingT(t) dlc := mocks2.NewMockDeleteLockCommand() - When(dlc.DeleteLock("id")).ThenReturn(&models.ProjectLock{ + When(dlc.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(&models.ProjectLock{ Pull: models.PullRequest{ BaseRepo: models.Repo{FullName: "owner/repo"}, }, @@ -350,7 +350,7 @@ func TestDeleteLock_CommentFailed(t *testing.T) { tmp := t.TempDir() backend, err := db.New(tmp) Ok(t, err) - When(cp.CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]())).ThenReturn(errors.New("err")) + When(cp.CreateComment(Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]())).ThenReturn(errors.New("err")) lc := controllers.LocksController{ DeleteLockCommand: dlc, Logger: logging.NewNoopLogger(t), @@ -363,7 +363,7 @@ func TestDeleteLock_CommentFailed(t *testing.T) { req = mux.SetURLVars(req, map[string]string{"id": "id"}) w := httptest.NewRecorder() lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusOK, "Deleted lock id \"id\"") + ResponseContains(t, w, http.StatusOK, "Deleted lock id 'id'") } func TestDeleteLock_CommentSuccess(t *testing.T) { @@ -380,7 +380,7 @@ func TestDeleteLock_CommentSuccess(t *testing.T) { pull := models.PullRequest{ BaseRepo: models.Repo{FullName: "owner/repo"}, } - When(dlc.DeleteLock("id")).ThenReturn(&models.ProjectLock{ + When(dlc.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(&models.ProjectLock{ Pull: pull, Workspace: "workspace", Project: models.Project{ @@ -400,8 +400,8 @@ func TestDeleteLock_CommentSuccess(t *testing.T) { req = mux.SetURLVars(req, map[string]string{"id": "id"}) w := httptest.NewRecorder() lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusOK, "Deleted lock id \"id\"") - cp.VerifyWasCalled(Once()).CreateComment(pull.BaseRepo, pull.Num, - "**Warning**: The plan for dir: `path` workspace: `workspace` was **discarded** via the Atlantis UI.\n\n"+ - "To `apply` this plan you must run `plan` again.", "") + ResponseContains(t, w, http.StatusOK, "Deleted lock id 'id'") + cp.VerifyWasCalled(Once()).CreateComment(Any[logging.SimpleLogging](), Eq(pull.BaseRepo), Eq(pull.Num), + 
Eq("**Warning**: The plan for dir: `path` workspace: `workspace` was **discarded** via the Atlantis UI.\n\n"+ + "To `apply` this plan you must run `plan` again."), Eq("")) } diff --git a/server/controllers/templates/web_templates.go b/server/controllers/templates/web_templates.go deleted file mode 100644 index 01bbc2faac..0000000000 --- a/server/controllers/templates/web_templates.go +++ /dev/null @@ -1,695 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package templates - -import ( - "html/template" - "io" - "time" - - "github.com/runatlantis/atlantis/server/jobs" -) - -//go:generate pegomock generate --package mocks -o mocks/mock_template_writer.go TemplateWriter - -// TemplateWriter is an interface over html/template that's used to enable -// mocking. -type TemplateWriter interface { - // Execute applies a parsed template to the specified data object, - // writing the output to wr. - Execute(wr io.Writer, data interface{}) error -} - -// LockIndexData holds the fields needed to display the index view for locks. -type LockIndexData struct { - LockPath string - RepoFullName string - PullNum int - Path string - Workspace string - LockedBy string - Time time.Time - TimeFormatted string -} - -// ApplyLockData holds the fields to display in the index view -type ApplyLockData struct { - Locked bool - GlobalApplyLockEnabled bool - Time time.Time - TimeFormatted string -} - -// IndexData holds the data for rendering the index page -type IndexData struct { - Locks []LockIndexData - PullToJobMapping []jobs.PullInfoWithJobIDs - - ApplyLock ApplyLockData - AtlantisVersion string - // CleanedBasePath is the path Atlantis is accessible at externally. If - // not using a path-based proxy, this will be an empty string. Never ends - // in a '/' (hence "cleaned"). - CleanedBasePath string -} - -var IndexTemplate = template.Must(template.New("index.html.tmpl").Parse(` -<!DOCTYPE html> -<html lang="en"> -<head> - <meta charset="utf-8"> - <title>atlantis - - - - - - - - - - - -
-  [IndexTemplate HTML body: nav bar with the atlantis logo, "Plan discarded and unlocked!" banner, global apply-lock panel ("Apply commands are disabled globally" / "Apply commands are enabled", "Lock Status: Active", "Active Since: {{ .ApplyLock.TimeFormatted }}", enable/disable links), a Locks table (Repository / Project / Workspace / Locked By / Date/Time / Status, "No locks found." fallback), a Jobs table with per-job {{ .TimeFormatted }} and {{ .JobDescription }} ("No jobs found." fallback), and a {{ .AtlantisVersion }} footer]
- - - -`)) - -// LockDetailData holds the fields needed to display the lock detail view. -type LockDetailData struct { - LockKeyEncoded string - LockKey string - RepoOwner string - RepoName string - PullRequestLink string - LockedBy string - Workspace string - AtlantisVersion string - // CleanedBasePath is the path Atlantis is accessible at externally. If - // not using a path-based proxy, this will be an empty string. Never ends - // in a '/' (hence "cleaned"). - CleanedBasePath string -} - -var LockTemplate = template.Must(template.New("lock.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - -
-  [LockTemplate HTML body: "{{.LockKey}} Locked" heading; detail fields Repo Owner {{.RepoOwner}}, Repo Name {{.RepoName}}, Pull Request Link, Locked By {{.LockedBy}}, Workspace {{.Workspace}}; a "Discard Plan & Unlock" button; v{{ .AtlantisVersion }} footer]
- - - -`)) - -// ProjectJobData holds the data needed to stream the current PR information -type ProjectJobData struct { - AtlantisVersion string - ProjectPath string - CleanedBasePath string -} - -var ProjectJobsTemplate = template.Must(template.New("blank.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - - -
-  [ProjectJobsTemplate HTML body: nav bar with the atlantis logo, an "Initializing..." placeholder for the streamed job output, and the log-streaming markup and scripts]
- - - - - - - - - - - -`)) - -type ProjectJobsError struct { - AtlantisVersion string - ProjectPath string - CleanedBasePath string -} - -var ProjectJobsErrorTemplate = template.Must(template.New("blank.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - - -
-  [ProjectJobsErrorTemplate HTML body: nav bar with the atlantis logo plus the error-output container and scripts]
- - - - - - - - - -`)) - -// GithubSetupData holds the data for rendering the github app setup page -type GithubSetupData struct { - Target string - Manifest string - ID int64 - Key string - WebhookSecret string - URL string - CleanedBasePath string -} - -var GithubAppSetupTemplate = template.Must(template.New("github-app.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - - -
-  [GithubAppSetupTemplate HTML body: "Create a github app" / "Github app created successfully!" heading, the app-manifest submission form, and post-install instructions: visit {{ .URL }}/installations/new, then set gh-app-id: {{ .ID }}, gh-app-key-file: {{ .Key }}, gh-webhook-secret: {{ .WebhookSecret }} and restart Atlantis]
- - -`)) diff --git a/server/controllers/templates/mocks/mock_template_writer.go b/server/controllers/web_templates/mocks/mock_template_writer.go similarity index 97% rename from server/controllers/templates/mocks/mock_template_writer.go rename to server/controllers/web_templates/mocks/mock_template_writer.go index e3fafa580c..5d3e33a2ef 100644 --- a/server/controllers/templates/mocks/mock_template_writer.go +++ b/server/controllers/web_templates/mocks/mock_template_writer.go @@ -1,5 +1,5 @@ // Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/controllers/templates (interfaces: TemplateWriter) +// Source: github.com/runatlantis/atlantis/server/controllers/web_templates (interfaces: TemplateWriter) package mocks diff --git a/server/controllers/web_templates/templates/github-app.html.tmpl b/server/controllers/web_templates/templates/github-app.html.tmpl new file mode 100644 index 0000000000..34ce01550d --- /dev/null +++ b/server/controllers/web_templates/templates/github-app.html.tmpl @@ -0,0 +1,81 @@ + + + + + atlantis + + + + + + + + + + + +
+  [github-app.html.tmpl body: the GitHub app setup page previously inlined in web_templates.go — create/success heading, app-manifest submission form, and the gh-app-id / gh-app-key-file / gh-webhook-secret post-install instructions]
+ + diff --git a/server/controllers/web_templates/templates/index.html.tmpl b/server/controllers/web_templates/templates/index.html.tmpl new file mode 100644 index 0000000000..b9021f9b61 --- /dev/null +++ b/server/controllers/web_templates/templates/index.html.tmpl @@ -0,0 +1,243 @@ + + + + + atlantis + + + + + + + + + + + +
+  [index.html.tmpl body: the index page previously inlined in web_templates.go — nav bar, "Plan discarded and unlocked!" banner, apply-lock enable/disable controls, Locks and Jobs tables, {{ .AtlantisVersion }} footer]
+ + + diff --git a/server/controllers/web_templates/templates/lock.html.tmpl b/server/controllers/web_templates/templates/lock.html.tmpl new file mode 100644 index 0000000000..56bf25a06b --- /dev/null +++ b/server/controllers/web_templates/templates/lock.html.tmpl @@ -0,0 +1,97 @@ + + + + + atlantis + + + + + + + + + + +
+  [lock.html.tmpl body: the lock detail page previously inlined in web_templates.go — "{{.LockKey}} Locked" heading, repo owner/name, pull request link, locked-by and workspace fields, "Discard Plan & Unlock" button, v{{ .AtlantisVersion }} footer]
+ + + \ No newline at end of file diff --git a/server/controllers/web_templates/templates/project-jobs-error.html.tmpl b/server/controllers/web_templates/templates/project-jobs-error.html.tmpl new file mode 100644 index 0000000000..8eead799b7 --- /dev/null +++ b/server/controllers/web_templates/templates/project-jobs-error.html.tmpl @@ -0,0 +1,59 @@ + + + + + atlantis + + + + + + + + + + + +
+  [project-jobs-error.html.tmpl body: the job error page previously inlined in web_templates.go — nav bar plus the error-output container and scripts]
+ + + + + + + + + diff --git a/server/controllers/web_templates/templates/project-jobs.html.tmpl b/server/controllers/web_templates/templates/project-jobs.html.tmpl new file mode 100644 index 0000000000..aaeb222568 --- /dev/null +++ b/server/controllers/web_templates/templates/project-jobs.html.tmpl @@ -0,0 +1,95 @@ + + + + + atlantis + + + + + + + + + + + +
+  [project-jobs.html.tmpl body: the job streaming page previously inlined in web_templates.go — nav bar, "Initializing..." placeholder, and the log-streaming markup and scripts]
+ + + + + + + + + + + diff --git a/server/controllers/web_templates/web_templates.go b/server/controllers/web_templates/web_templates.go new file mode 100644 index 0000000000..0794c80fba --- /dev/null +++ b/server/controllers/web_templates/web_templates.go @@ -0,0 +1,131 @@ +// Copyright 2017 HootSuite Media Inc. +// +// Licensed under the Apache License, Version 2.0 (the License); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an AS IS BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// Modified hereafter by contributors to runatlantis/atlantis. + +package web_templates + +import ( + "embed" + "html/template" + "io" + "time" + + "github.com/Masterminds/sprig/v3" + "github.com/runatlantis/atlantis/server/jobs" +) + +//go:generate pegomock generate --package mocks -o mocks/mock_template_writer.go TemplateWriter + +//go:embed templates/* +var templatesFS embed.FS + +// Read all the templates from the embedded filesystem +var templates, _ = template.New("").Funcs(sprig.TxtFuncMap()).ParseFS(templatesFS, "templates/*.tmpl") + +var templateFileNames = map[string]string{ + "index": "index.html.tmpl", + "lock": "lock.html.tmpl", + "project-jobs": "project-jobs.html.tmpl", + "project-jobs-error": "project-jobs-error.html.tmpl", + "github-app": "github-app.html.tmpl", +} + +// TemplateWriter is an interface over html/template that's used to enable +// mocking. +type TemplateWriter interface { + // Execute applies a parsed template to the specified data object, + // writing the output to wr. + Execute(wr io.Writer, data interface{}) error +} + +// LockIndexData holds the fields needed to display the index view for locks. +type LockIndexData struct { + LockPath string + RepoFullName string + PullNum int + Path string + Workspace string + LockedBy string + Time time.Time + TimeFormatted string +} + +// ApplyLockData holds the fields to display in the index view +type ApplyLockData struct { + Locked bool + GlobalApplyLockEnabled bool + Time time.Time + TimeFormatted string +} + +// IndexData holds the data for rendering the index page +type IndexData struct { + Locks []LockIndexData + PullToJobMapping []jobs.PullInfoWithJobIDs + + ApplyLock ApplyLockData + AtlantisVersion string + // CleanedBasePath is the path Atlantis is accessible at externally. If + // not using a path-based proxy, this will be an empty string. Never ends + // in a '/' (hence "cleaned"). + CleanedBasePath string +} + +var IndexTemplate = templates.Lookup(templateFileNames["index"]) + +// LockDetailData holds the fields needed to display the lock detail view. +type LockDetailData struct { + LockKeyEncoded string + LockKey string + RepoOwner string + RepoName string + PullRequestLink string + LockedBy string + Workspace string + AtlantisVersion string + // CleanedBasePath is the path Atlantis is accessible at externally. If + // not using a path-based proxy, this will be an empty string. Never ends + // in a '/' (hence "cleaned"). 
+ CleanedBasePath string +} + +var LockTemplate = templates.Lookup(templateFileNames["lock"]) + +// ProjectJobData holds the data needed to stream the current PR information +type ProjectJobData struct { + AtlantisVersion string + ProjectPath string + CleanedBasePath string +} + +var ProjectJobsTemplate = templates.Lookup(templateFileNames["project-jobs"]) + +type ProjectJobsError struct { + AtlantisVersion string + ProjectPath string + CleanedBasePath string +} + +var ProjectJobsErrorTemplate = templates.Lookup(templateFileNames["project-jobs-error"]) + +// GithubSetupData holds the data for rendering the github app setup page +type GithubSetupData struct { + Target string + Manifest string + ID int64 + Key string + WebhookSecret string + URL string + CleanedBasePath string +} + +var GithubAppSetupTemplate = templates.Lookup(templateFileNames["github-app"]) diff --git a/server/controllers/templates/web_templates_test.go b/server/controllers/web_templates/web_templates_test.go similarity index 95% rename from server/controllers/templates/web_templates_test.go rename to server/controllers/web_templates/web_templates_test.go index 5b88c3e1d9..0ce6f00a9a 100644 --- a/server/controllers/templates/web_templates_test.go +++ b/server/controllers/web_templates/web_templates_test.go @@ -1,4 +1,4 @@ -package templates +package web_templates import ( "io" @@ -19,13 +19,13 @@ func TestIndexTemplate(t *testing.T) { Path: "path", Workspace: "workspace", Time: time.Now(), - TimeFormatted: "02-01-2006 15:04:05", + TimeFormatted: "2006-01-02 15:04:05", }, }, ApplyLock: ApplyLockData{ Locked: true, Time: time.Now(), - TimeFormatted: "02-01-2006 15:04:05", + TimeFormatted: "2006-01-02 15:04:05", }, AtlantisVersion: "v0.0.0", CleanedBasePath: "/path", diff --git a/server/core/config/parser_validator_test.go b/server/core/config/parser_validator_test.go index 08be7173b8..c21187bc47 100644 --- a/server/core/config/parser_validator_test.go +++ b/server/core/config/parser_validator_test.go @@ -1286,7 +1286,7 @@ func TestParseGlobalCfg(t *testing.T) { input: `repos: - id: /.*/ allowed_overrides: [invalid]`, - expErr: "repos: (0: (allowed_overrides: \"invalid\" is not a valid override, only \"plan_requirements\", \"apply_requirements\", \"import_requirements\", \"workflow\", \"delete_source_branch_on_merge\", \"repo_locking\", \"policy_check\", and \"custom_policy_check\" are supported.).).", + expErr: "repos: (0: (allowed_overrides: \"invalid\" is not a valid override, only \"plan_requirements\", \"apply_requirements\", \"import_requirements\", \"workflow\", \"delete_source_branch_on_merge\", \"repo_locking\", \"repo_locks\", \"policy_check\", \"custom_policy_check\", and \"silence_pr_comments\" are supported.).).", }, "invalid plan_requirement": { input: `repos: @@ -1306,8 +1306,14 @@ func TestParseGlobalCfg(t *testing.T) { import_requirements: [invalid]`, expErr: "repos: (0: (import_requirements: \"invalid\" is not a valid import_requirement, only \"approved\", \"mergeable\" and \"undiverged\" are supported.).).", }, + "invalid silence_pr_comments": { + input: `repos: +- id: /.*/ + silence_pr_comments: [invalid]`, + expErr: "server-side repo config 'silence_pr_comments' key value of 'invalid' is not supported, supported values are [plan, apply]", + }, "disable autodiscover": { - input: `repos: + input: `repos: - id: /.*/ autodiscover: mode: disabled`, @@ -1320,6 +1326,28 @@ func TestParseGlobalCfg(t *testing.T) { }, }, Workflows: defaultCfg.Workflows, + TeamAuthz: valid.TeamAuthz{ + Args: make([]string, 0), + }, 
+ }, + }, + "disable repo locks": { + input: `repos: +- id: /.*/ + repo_locks: + mode: disabled`, + exp: valid.GlobalCfg{ + Repos: []valid.Repo{ + defaultCfg.Repos[0], + { + IDRegex: regexp.MustCompile(".*"), + RepoLocks: &valid.RepoLocks{Mode: valid.RepoLocksDisabledMode}, + }, + }, + Workflows: defaultCfg.Workflows, + TeamAuthz: valid.TeamAuthz{ + Args: make([]string, 0), + }, }, }, "no workflows key": { @@ -1340,6 +1368,9 @@ workflows: "default": defaultCfg.Workflows["default"], "name": defaultWorkflow("name"), }, + TeamAuthz: valid.TeamAuthz{ + Args: make([]string, 0), + }, }, }, "workflow stages empty": { @@ -1358,6 +1389,9 @@ workflows: "default": defaultCfg.Workflows["default"], "name": defaultWorkflow("name"), }, + TeamAuthz: valid.TeamAuthz{ + Args: make([]string, 0), + }, }, }, "workflow steps empty": { @@ -1381,6 +1415,9 @@ workflows: "default": defaultCfg.Workflows["default"], "name": defaultWorkflow("name"), }, + TeamAuthz: valid.TeamAuthz{ + Args: make([]string, 0), + }, }, }, "all keys specified": { @@ -1399,6 +1436,8 @@ repos: policy_check: true autodiscover: mode: enabled + repo_locks: + mode: on_apply - id: /.*/ branch: /(master|main)/ pre_workflow_hooks: @@ -1408,6 +1447,8 @@ repos: policy_check: false autodiscover: mode: disabled + repo_locks: + mode: disabled workflows: custom1: plan: @@ -1455,6 +1496,7 @@ policies: AllowCustomWorkflows: Bool(true), PolicyCheck: Bool(true), AutoDiscover: &valid.AutoDiscover{Mode: valid.AutoDiscoverEnabledMode}, + RepoLocks: &valid.RepoLocks{Mode: valid.RepoLocksOnApplyMode}, }, { IDRegex: regexp.MustCompile(".*"), @@ -1463,6 +1505,7 @@ policies: PostWorkflowHooks: postWorkflowHooks, PolicyCheck: Bool(false), AutoDiscover: &valid.AutoDiscover{Mode: valid.AutoDiscoverDisabledMode}, + RepoLocks: &valid.RepoLocks{Mode: valid.RepoLocksDisabledMode}, }, }, Workflows: map[string]valid.Workflow{ @@ -1481,6 +1524,9 @@ policies: }, }, }, + TeamAuthz: valid.TeamAuthz{ + Args: make([]string, 0), + }, }, }, "id regex with trailing slash": { @@ -1498,6 +1544,9 @@ repos: Workflows: map[string]valid.Workflow{ "default": defaultCfg.Workflows["default"], }, + TeamAuthz: valid.TeamAuthz{ + Args: make([]string, 0), + }, }, }, "referencing default workflow": { @@ -1517,6 +1566,9 @@ repos: Workflows: map[string]valid.Workflow{ "default": defaultCfg.Workflows["default"], }, + TeamAuthz: valid.TeamAuthz{ + Args: make([]string, 0), + }, }, }, "redefine default workflow": { @@ -1570,7 +1622,7 @@ workflows: AllowedOverrides: []string{}, AllowCustomWorkflows: Bool(false), DeleteSourceBranchOnMerge: Bool(false), - RepoLocking: Bool(true), + RepoLocks: &valid.DefaultRepoLocks, PolicyCheck: Bool(false), CustomPolicyCheck: Bool(false), AutoDiscover: raw.DefaultAutoDiscover(), @@ -1592,6 +1644,9 @@ workflows: }, }, }, + TeamAuthz: valid.TeamAuthz{ + Args: make([]string, 0), + }, }, }, } @@ -1721,6 +1776,9 @@ func TestParserValidator_ParseGlobalCfgJSON(t *testing.T) { "allow_custom_workflows": true, "autodiscover": { "mode": "enabled" + }, + "repo_locks": { + "mode": "on_apply" } }, { @@ -1782,6 +1840,7 @@ func TestParserValidator_ParseGlobalCfgJSON(t *testing.T) { AllowedOverrides: []string{"workflow", "apply_requirements"}, AllowCustomWorkflows: Bool(true), AutoDiscover: &valid.AutoDiscover{Mode: valid.AutoDiscoverEnabledMode}, + RepoLocks: &valid.RepoLocks{Mode: valid.RepoLocksOnApplyMode}, }, { ID: "github.com/owner/repo", @@ -1790,6 +1849,7 @@ func TestParserValidator_ParseGlobalCfgJSON(t *testing.T) { AllowedOverrides: nil, AllowCustomWorkflows: nil, 
AutoDiscover: nil, + RepoLocks: nil, }, }, Workflows: map[string]valid.Workflow{ @@ -1808,6 +1868,9 @@ func TestParserValidator_ParseGlobalCfgJSON(t *testing.T) { }, }, }, + TeamAuthz: valid.TeamAuthz{ + Args: make([]string, 0), + }, }, }, } diff --git a/server/core/config/raw/global_cfg.go b/server/core/config/raw/global_cfg.go index b795294239..275c79be71 100644 --- a/server/core/config/raw/global_cfg.go +++ b/server/core/config/raw/global_cfg.go @@ -8,6 +8,7 @@ import ( validation "github.com/go-ozzo/ozzo-validation" "github.com/pkg/errors" "github.com/runatlantis/atlantis/server/core/config/valid" + "github.com/runatlantis/atlantis/server/utils" ) // GlobalCfg is the raw schema for server-side repo config. @@ -16,6 +17,7 @@ type GlobalCfg struct { Workflows map[string]Workflow `yaml:"workflows" json:"workflows"` PolicySets PolicySets `yaml:"policies" json:"policies"` Metrics Metrics `yaml:"metrics" json:"metrics"` + TeamAuthz TeamAuthz `yaml:"team_authz" json:"team_authz"` } // Repo is the raw schema for repos in the server-side repo config. @@ -34,9 +36,11 @@ type Repo struct { AllowCustomWorkflows *bool `yaml:"allow_custom_workflows,omitempty" json:"allow_custom_workflows,omitempty"` DeleteSourceBranchOnMerge *bool `yaml:"delete_source_branch_on_merge,omitempty" json:"delete_source_branch_on_merge,omitempty"` RepoLocking *bool `yaml:"repo_locking,omitempty" json:"repo_locking,omitempty"` + RepoLocks *RepoLocks `yaml:"repo_locks,omitempty" json:"repo_locks,omitempty"` PolicyCheck *bool `yaml:"policy_check,omitempty" json:"policy_check,omitempty"` CustomPolicyCheck *bool `yaml:"custom_policy_check,omitempty" json:"custom_policy_check,omitempty"` AutoDiscover *AutoDiscover `yaml:"autodiscover,omitempty" json:"autodiscover,omitempty"` + SilencePRComments []string `yaml:"silence_pr_comments,omitempty" json:"silence_pr_comments,omitempty"` } func (g GlobalCfg) Validate() error { @@ -93,6 +97,24 @@ func (g GlobalCfg) Validate() error { } } } + + // Validate supported SilencePRComments values. 
+ for _, repo := range g.Repos { + if repo.SilencePRComments == nil { + continue + } + for _, silenceStage := range repo.SilencePRComments { + if !utils.SlicesContains(valid.AllowedSilencePRComments, silenceStage) { + return fmt.Errorf( + "server-side repo config '%s' key value of '%s' is not supported, supported values are [%s]", + valid.SilencePRCommentsKey, + silenceStage, + strings.Join(valid.AllowedSilencePRComments, ", "), + ) + } + } + } + return nil } @@ -140,6 +162,7 @@ func (g GlobalCfg) ToValid(defaultCfg valid.GlobalCfg) valid.GlobalCfg { Workflows: workflows, PolicySets: g.PolicySets.ToValid(), Metrics: g.Metrics.ToValid(), + TeamAuthz: g.TeamAuthz.ToValid(), } } @@ -194,8 +217,8 @@ func (r Repo) Validate() error { overridesValid := func(value interface{}) error { overrides := value.([]string) for _, o := range overrides { - if o != valid.PlanRequirementsKey && o != valid.ApplyRequirementsKey && o != valid.ImportRequirementsKey && o != valid.WorkflowKey && o != valid.DeleteSourceBranchOnMergeKey && o != valid.RepoLockingKey && o != valid.PolicyCheckKey && o != valid.CustomPolicyCheckKey { - return fmt.Errorf("%q is not a valid override, only %q, %q, %q, %q, %q, %q, %q, and %q are supported", o, valid.PlanRequirementsKey, valid.ApplyRequirementsKey, valid.ImportRequirementsKey, valid.WorkflowKey, valid.DeleteSourceBranchOnMergeKey, valid.RepoLockingKey, valid.PolicyCheckKey, valid.CustomPolicyCheckKey) + if o != valid.PlanRequirementsKey && o != valid.ApplyRequirementsKey && o != valid.ImportRequirementsKey && o != valid.WorkflowKey && o != valid.DeleteSourceBranchOnMergeKey && o != valid.RepoLockingKey && o != valid.RepoLocksKey && o != valid.PolicyCheckKey && o != valid.CustomPolicyCheckKey && o != valid.SilencePRCommentsKey { + return fmt.Errorf("%q is not a valid override, only %q, %q, %q, %q, %q, %q, %q, %q, %q, and %q are supported", o, valid.PlanRequirementsKey, valid.ApplyRequirementsKey, valid.ImportRequirementsKey, valid.WorkflowKey, valid.DeleteSourceBranchOnMergeKey, valid.RepoLockingKey, valid.RepoLocksKey, valid.PolicyCheckKey, valid.CustomPolicyCheckKey, valid.SilencePRCommentsKey) } } return nil @@ -220,6 +243,14 @@ func (r Repo) Validate() error { return nil } + repoLocksValid := func(value interface{}) error { + repoLocks := value.(*RepoLocks) + if repoLocks != nil { + return repoLocks.Validate() + } + return nil + } + return validation.ValidateStruct(&r, validation.Field(&r.ID, validation.Required, validation.By(idValid)), validation.Field(&r.Branch, validation.By(branchValid)), @@ -231,6 +262,7 @@ func (r Repo) Validate() error { validation.Field(&r.Workflow, validation.By(workflowExists)), validation.Field(&r.DeleteSourceBranchOnMerge, validation.By(deleteSourceBranchOnMergeValid)), validation.Field(&r.AutoDiscover, validation.By(autoDiscoverValid)), + validation.Field(&r.RepoLocks, validation.By(repoLocksValid)), ) } @@ -330,6 +362,11 @@ OuterGlobalImportReqs: autoDiscover = r.AutoDiscover.ToValid() } + var repoLocks *valid.RepoLocks + if r.RepoLocks != nil { + repoLocks = r.RepoLocks.ToValid() + } + return valid.Repo{ ID: id, IDRegex: idRegex, @@ -346,8 +383,10 @@ OuterGlobalImportReqs: AllowCustomWorkflows: r.AllowCustomWorkflows, DeleteSourceBranchOnMerge: r.DeleteSourceBranchOnMerge, RepoLocking: r.RepoLocking, + RepoLocks: repoLocks, PolicyCheck: r.PolicyCheck, CustomPolicyCheck: r.CustomPolicyCheck, AutoDiscover: autoDiscover, + SilencePRComments: r.SilencePRComments, } } diff --git a/server/core/config/raw/policies.go 
b/server/core/config/raw/policies.go index aee7019ba9..be10e11c09 100644 --- a/server/core/config/raw/policies.go +++ b/server/core/config/raw/policies.go @@ -70,11 +70,12 @@ func (o PolicyOwners) ToValid() valid.PolicyOwners { } type PolicySet struct { - Path string `yaml:"path" json:"path"` - Source string `yaml:"source" json:"source"` - Name string `yaml:"name" json:"name"` - Owners PolicyOwners `yaml:"owners,omitempty" json:"owners,omitempty"` - ApproveCount int `yaml:"approve_count,omitempty" json:"approve_count,omitempty"` + Path string `yaml:"path" json:"path"` + Source string `yaml:"source" json:"source"` + Name string `yaml:"name" json:"name"` + Owners PolicyOwners `yaml:"owners,omitempty" json:"owners,omitempty"` + ApproveCount int `yaml:"approve_count,omitempty" json:"approve_count,omitempty"` + PreventSelfApprove bool `yaml:"prevent_self_approve,omitempty" json:"prevent_self_approve,omitempty"` } func (p PolicySet) Validate() error { @@ -94,6 +95,7 @@ func (p PolicySet) ToValid() valid.PolicySet { policySet.Path = p.Path policySet.Source = p.Source policySet.ApproveCount = p.ApproveCount + policySet.PreventSelfApprove = p.PreventSelfApprove policySet.Owners = p.Owners.ToValid() return policySet diff --git a/server/core/config/raw/project.go b/server/core/config/raw/project.go index d73062cef3..fe0e656a8c 100644 --- a/server/core/config/raw/project.go +++ b/server/core/config/raw/project.go @@ -21,22 +21,24 @@ const ( ) type Project struct { - Name *string `yaml:"name,omitempty"` - Branch *string `yaml:"branch,omitempty"` - Dir *string `yaml:"dir,omitempty"` - Workspace *string `yaml:"workspace,omitempty"` - Workflow *string `yaml:"workflow,omitempty"` - TerraformVersion *string `yaml:"terraform_version,omitempty"` - Autoplan *Autoplan `yaml:"autoplan,omitempty"` - PlanRequirements []string `yaml:"plan_requirements,omitempty"` - ApplyRequirements []string `yaml:"apply_requirements,omitempty"` - ImportRequirements []string `yaml:"import_requirements,omitempty"` - DependsOn []string `yaml:"depends_on,omitempty"` - DeleteSourceBranchOnMerge *bool `yaml:"delete_source_branch_on_merge,omitempty"` - RepoLocking *bool `yaml:"repo_locking,omitempty"` - ExecutionOrderGroup *int `yaml:"execution_order_group,omitempty"` - PolicyCheck *bool `yaml:"policy_check,omitempty"` - CustomPolicyCheck *bool `yaml:"custom_policy_check,omitempty"` + Name *string `yaml:"name,omitempty"` + Branch *string `yaml:"branch,omitempty"` + Dir *string `yaml:"dir,omitempty"` + Workspace *string `yaml:"workspace,omitempty"` + Workflow *string `yaml:"workflow,omitempty"` + TerraformVersion *string `yaml:"terraform_version,omitempty"` + Autoplan *Autoplan `yaml:"autoplan,omitempty"` + PlanRequirements []string `yaml:"plan_requirements,omitempty"` + ApplyRequirements []string `yaml:"apply_requirements,omitempty"` + ImportRequirements []string `yaml:"import_requirements,omitempty"` + DependsOn []string `yaml:"depends_on,omitempty"` + DeleteSourceBranchOnMerge *bool `yaml:"delete_source_branch_on_merge,omitempty"` + RepoLocking *bool `yaml:"repo_locking,omitempty"` + RepoLocks *RepoLocks `yaml:"repo_locks,omitempty"` + ExecutionOrderGroup *int `yaml:"execution_order_group,omitempty"` + PolicyCheck *bool `yaml:"policy_check,omitempty"` + CustomPolicyCheck *bool `yaml:"custom_policy_check,omitempty"` + SilencePRComments []string `yaml:"silence_pr_comments,omitempty"` } func (p Project) Validate() error { @@ -139,6 +141,10 @@ func (p Project) ToValid() valid.Project { v.RepoLocking = p.RepoLocking } + if p.RepoLocks != 
nil { + v.RepoLocks = p.RepoLocks.ToValid() + } + if p.ExecutionOrderGroup != nil { v.ExecutionOrderGroup = *p.ExecutionOrderGroup } @@ -151,6 +157,10 @@ func (p Project) ToValid() valid.Project { v.CustomPolicyCheck = p.CustomPolicyCheck } + if p.SilencePRComments != nil { + v.SilencePRComments = p.SilencePRComments + } + return v } diff --git a/server/core/config/raw/project_test.go b/server/core/config/raw/project_test.go index 72a8dd78d0..3c69177f96 100644 --- a/server/core/config/raw/project_test.go +++ b/server/core/config/raw/project_test.go @@ -331,6 +331,7 @@ func TestProject_Validate(t *testing.T) { func TestProject_ToValid(t *testing.T) { tfVersionPointEleven, _ := version.NewVersion("v0.11.0") + repoLocksOnApply := valid.RepoLocksOnApplyMode cases := []struct { description string input raw.Project @@ -366,6 +367,9 @@ func TestProject_ToValid(t *testing.T) { WhenModified: []string{"hi"}, Enabled: Bool(false), }, + RepoLocks: &raw.RepoLocks{ + Mode: &repoLocksOnApply, + }, ApplyRequirements: []string{"approved"}, Name: String("myname"), ExecutionOrderGroup: Int(10), @@ -379,6 +383,9 @@ func TestProject_ToValid(t *testing.T) { WhenModified: []string{"hi"}, Enabled: false, }, + RepoLocks: &valid.RepoLocks{ + Mode: repoLocksOnApply, + }, ApplyRequirements: []string{"approved"}, Name: String("myname"), ExecutionOrderGroup: 10, diff --git a/server/core/config/raw/repo_cfg.go b/server/core/config/raw/repo_cfg.go index f3a688725d..9aa18c7733 100644 --- a/server/core/config/raw/repo_cfg.go +++ b/server/core/config/raw/repo_cfg.go @@ -27,6 +27,8 @@ type RepoCfg struct { EmojiReaction *string `yaml:"emoji_reaction,omitempty"` AllowedRegexpPrefixes []string `yaml:"allowed_regexp_prefixes,omitempty"` AbortOnExcecutionOrderFail *bool `yaml:"abort_on_execution_order_fail,omitempty"` + RepoLocks *RepoLocks `yaml:"repo_locks,omitempty"` + SilencePRComments []string `yaml:"silence_pr_comments,omitempty"` } func (r RepoCfg) Validate() error { @@ -77,6 +79,10 @@ func (r RepoCfg) ToValid() valid.RepoCfg { autoDiscover = r.AutoDiscover.ToValid() } + var repoLocks *valid.RepoLocks + if r.RepoLocks != nil { + repoLocks = r.RepoLocks.ToValid() + } return valid.RepoCfg{ Version: *r.Version, Projects: validProjects, @@ -90,5 +96,7 @@ func (r RepoCfg) ToValid() valid.RepoCfg { AllowedRegexpPrefixes: r.AllowedRegexpPrefixes, EmojiReaction: emojiReaction, AbortOnExcecutionOrderFail: abortOnExcecutionOrderFail, + RepoLocks: repoLocks, + SilencePRComments: r.SilencePRComments, } } diff --git a/server/core/config/raw/repo_cfg_test.go b/server/core/config/raw/repo_cfg_test.go index 31d01101dd..b3844ee68c 100644 --- a/server/core/config/raw/repo_cfg_test.go +++ b/server/core/config/raw/repo_cfg_test.go @@ -11,6 +11,8 @@ import ( func TestConfig_UnmarshalYAML(t *testing.T) { autoDiscoverEnabled := valid.AutoDiscoverEnabledMode + repoLocksDisabled := valid.RepoLocksDisabledMode + repoLocksOnApply := valid.RepoLocksOnApplyMode cases := []struct { description string input string @@ -130,6 +132,8 @@ autodiscover: mode: enabled parallel_apply: true parallel_plan: false +repo_locks: + mode: on_apply projects: - dir: mydir workspace: myworkspace @@ -139,6 +143,8 @@ projects: enabled: false when_modified: [] apply_requirements: [mergeable] + repo_locks: + mode: disabled workflows: default: plan: @@ -156,6 +162,7 @@ allowed_regexp_prefixes: Automerge: Bool(true), ParallelApply: Bool(true), ParallelPlan: Bool(false), + RepoLocks: &raw.RepoLocks{Mode: &repoLocksOnApply}, Projects: []raw.Project{ { Dir: String("mydir"), @@ 
-167,6 +174,7 @@ allowed_regexp_prefixes: Enabled: Bool(false), }, ApplyRequirements: []string{"mergeable"}, + RepoLocks: &raw.RepoLocks{Mode: &repoLocksDisabled}, }, }, Workflows: map[string]raw.Workflow{ @@ -236,6 +244,7 @@ func TestConfig_Validate(t *testing.T) { func TestConfig_ToValid(t *testing.T) { autoDiscoverEnabled := valid.AutoDiscoverEnabledMode + repoLocksOnApply := valid.RepoLocksOnApplyMode cases := []struct { description string input raw.RepoCfg @@ -256,12 +265,14 @@ func TestConfig_ToValid(t *testing.T) { AutoDiscover: &raw.AutoDiscover{}, Workflows: map[string]raw.Workflow{}, Projects: []raw.Project{}, + RepoLocks: &raw.RepoLocks{}, }, exp: valid.RepoCfg{ Version: 2, AutoDiscover: raw.DefaultAutoDiscover(), Workflows: map[string]valid.Workflow{}, Projects: nil, + RepoLocks: &valid.DefaultRepoLocks, }, }, { @@ -333,6 +344,30 @@ func TestConfig_ToValid(t *testing.T) { Workflows: map[string]valid.Workflow{}, }, }, + { + description: "repo_locks omitted", + input: raw.RepoCfg{ + Version: Int(2), + }, + exp: valid.RepoCfg{ + Version: 2, + Workflows: map[string]valid.Workflow{}, + }, + }, + { + description: "repo_locks included", + input: raw.RepoCfg{ + Version: Int(2), + RepoLocks: &raw.RepoLocks{Mode: &repoLocksOnApply}, + }, + exp: valid.RepoCfg{ + Version: 2, + RepoLocks: &valid.RepoLocks{ + Mode: valid.RepoLocksOnApplyMode, + }, + Workflows: map[string]valid.Workflow{}, + }, + }, { description: "only plan stage set", input: raw.RepoCfg{ @@ -372,6 +407,9 @@ func TestConfig_ToValid(t *testing.T) { AutoDiscover: &raw.AutoDiscover{ Mode: &autoDiscoverEnabled, }, + RepoLocks: &raw.RepoLocks{ + Mode: &repoLocksOnApply, + }, Workflows: map[string]raw.Workflow{ "myworkflow": { Apply: &raw.Stage{ @@ -424,6 +462,9 @@ func TestConfig_ToValid(t *testing.T) { AutoDiscover: &valid.AutoDiscover{ Mode: valid.AutoDiscoverEnabledMode, }, + RepoLocks: &valid.RepoLocks{ + Mode: valid.RepoLocksOnApplyMode, + }, Workflows: map[string]valid.Workflow{ "myworkflow": { Name: "myworkflow", diff --git a/server/core/config/raw/repo_locks.go b/server/core/config/raw/repo_locks.go new file mode 100644 index 0000000000..60ab8461fa --- /dev/null +++ b/server/core/config/raw/repo_locks.go @@ -0,0 +1,30 @@ +package raw + +import ( + validation "github.com/go-ozzo/ozzo-validation" + "github.com/runatlantis/atlantis/server/core/config/valid" +) + +type RepoLocks struct { + Mode *valid.RepoLocksMode `yaml:"mode,omitempty"` +} + +func (a RepoLocks) ToValid() *valid.RepoLocks { + var v valid.RepoLocks + + if a.Mode != nil { + v.Mode = *a.Mode + } else { + v.Mode = valid.DefaultRepoLocksMode + } + + return &v +} + +func (a RepoLocks) Validate() error { + res := validation.ValidateStruct(&a, + // If a.Mode is nil, this should still pass validation. + validation.Field(&a.Mode, validation.In(valid.RepoLocksDisabledMode, valid.RepoLocksOnPlanMode, valid.RepoLocksOnApplyMode)), + ) + return res +} diff --git a/server/core/config/raw/repo_locks_test.go b/server/core/config/raw/repo_locks_test.go new file mode 100644 index 0000000000..8a8d45a0fe --- /dev/null +++ b/server/core/config/raw/repo_locks_test.go @@ -0,0 +1,128 @@ +package raw_test + +import ( + "testing" + + "github.com/runatlantis/atlantis/server/core/config/raw" + "github.com/runatlantis/atlantis/server/core/config/valid" + . 
"github.com/runatlantis/atlantis/testing" +) + +func TestRepoLocks_UnmarshalYAML(t *testing.T) { + repoLocksOnPlan := valid.RepoLocksOnPlanMode + cases := []struct { + description string + input string + exp raw.RepoLocks + }{ + { + description: "omit unset fields", + input: "", + exp: raw.RepoLocks{ + Mode: nil, + }, + }, + { + description: "all fields set", + input: ` +mode: on_plan +`, + exp: raw.RepoLocks{ + Mode: &repoLocksOnPlan, + }, + }, + } + + for _, c := range cases { + t.Run(c.description, func(t *testing.T) { + var a raw.RepoLocks + err := unmarshalString(c.input, &a) + Ok(t, err) + Equals(t, c.exp, a) + }) + } +} + +func TestRepoLocks_Validate(t *testing.T) { + repoLocksDisabled := valid.RepoLocksDisabledMode + repoLocksOnPlan := valid.RepoLocksOnPlanMode + repoLocksOnApply := valid.RepoLocksOnApplyMode + randomString := valid.RepoLocksMode("random_string") + cases := []struct { + description string + input raw.RepoLocks + errContains *string + }{ + { + description: "nothing set", + input: raw.RepoLocks{}, + errContains: nil, + }, + { + description: "mode set to disabled", + input: raw.RepoLocks{ + Mode: &repoLocksDisabled, + }, + errContains: nil, + }, + { + description: "mode set to on_plan", + input: raw.RepoLocks{ + Mode: &repoLocksOnPlan, + }, + errContains: nil, + }, + { + description: "mode set to on_apply", + input: raw.RepoLocks{ + Mode: &repoLocksOnApply, + }, + errContains: nil, + }, + { + description: "mode set to random string", + input: raw.RepoLocks{ + Mode: &randomString, + }, + errContains: String("valid value"), + }, + } + for _, c := range cases { + t.Run(c.description, func(t *testing.T) { + if c.errContains == nil { + Ok(t, c.input.Validate()) + } else { + ErrContains(t, *c.errContains, c.input.Validate()) + } + }) + } +} + +func TestRepoLocks_ToValid(t *testing.T) { + repoLocksOnApply := valid.RepoLocksOnApplyMode + cases := []struct { + description string + input raw.RepoLocks + exp *valid.RepoLocks + }{ + { + description: "nothing set", + input: raw.RepoLocks{}, + exp: &valid.DefaultRepoLocks, + }, + { + description: "value set", + input: raw.RepoLocks{ + Mode: &repoLocksOnApply, + }, + exp: &valid.RepoLocks{ + Mode: valid.RepoLocksOnApplyMode, + }, + }, + } + for _, c := range cases { + t.Run(c.description, func(t *testing.T) { + Equals(t, c.exp, c.input.ToValid()) + }) + } +} diff --git a/server/core/config/raw/step.go b/server/core/config/raw/step.go index 68f8899717..6ada93488c 100644 --- a/server/core/config/raw/step.go +++ b/server/core/config/raw/step.go @@ -9,6 +9,7 @@ import ( validation "github.com/go-ozzo/ozzo-validation" "github.com/runatlantis/atlantis/server/core/config/valid" + "github.com/runatlantis/atlantis/server/utils" ) const ( @@ -27,42 +28,58 @@ const ( MultiEnvStepName = "multienv" ImportStepName = "import" StateRmStepName = "state_rm" + ShellArgKey = "shell" + ShellArgsArgKey = "shellArgs" ) -// Step represents a single action/command to perform. In YAML, it can be set as -// 1. A single string for a built-in command: -// - init -// - plan -// - policy_check -// -// 2. A map for an env step with name and command or value, or a run step with a command and output config -// - env: -// name: test -// command: echo 312 -// value: value -// - run: -// command: my custom command -// output: hide -// -// 3. A map for a built-in command and extra_args: -// - plan: -// extra_args: [-var-file=staging.tfvars] -// -// 4. 
A map for a custom run command: -// - run: my custom command -// -// Here we parse step in the most generic fashion possible. See fields for more -// details. +/* +Step represents a single action/command to perform. In YAML, it can be set as +1. A single string for a built-in command: + - init + - plan + - policy_check + +2. A map for an env step with name and command or value, or a run step with a command and output config + - env: + name: test_command + command: echo 312 + - env: + name: test_value + value: value + - env: + name: test_bash_command + command: echo ${test_value::7} + shell: bash + shellArgs: ["--verbose", "-c"] + - multienv: + command: envs.sh + output: hide + shell: sh + shellArgs: -c + - run: + command: my custom command + output: hide + +3. A map for a built-in command and extra_args: + - plan: + extra_args: [-var-file=staging.tfvars] + +4. A map for a custom run command: + - run: my custom command + +Here we parse step in the most generic fashion possible. See fields for more +details. +*/ type Step struct { // Key will be set in case #1 and #3 above to the key. In case #2, there // could be multiple keys (since the element is a map) so we don't set Key. Key *string - // EnvOrRun will be set in case #2 above. - EnvOrRun map[string]map[string]string - // Map will be set in case #3 above. - Map map[string]map[string][]string // StringVal will be set in case #4 above. StringVal map[string]string + // Map will be set in case #3 above. + Map map[string]map[string][]string + // CommandMap will be set in case #2 above. + CommandMap map[string]map[string]interface{} } func (s *Step) UnmarshalYAML(unmarshal func(interface{}) error) error { @@ -139,15 +156,16 @@ func (s Step) Validate() error { } for k := range args { if k != ExtraArgsKey { - return fmt.Errorf("built-in steps only support a single %s key, found %q in step %s", ExtraArgsKey, k, stepName) + return fmt.Errorf("built-in steps only support a single %s key, found %q in step %s", + ExtraArgsKey, k, stepName) } } } return nil } - envOrRunStep := func(value interface{}) error { - elem := value.(map[string]map[string]string) + envOrRunOrMultiEnvStep := func(value interface{}) error { + elem := value.(map[string]map[string]interface{}) var keys []string for k := range elem { keys = append(keys, k) @@ -166,65 +184,104 @@ func (s Step) Validate() error { stepName := keys[0] args := elem[keys[0]] - switch stepName { - case EnvStepName: - var argKeys []string - for k := range args { - argKeys = append(argKeys, k) + var argKeys []string + for k := range args { + argKeys = append(argKeys, k) + } + argMap := make(map[string]interface{}) + for k, v := range args { + argMap[k] = v + } + // Sort so tests can be deterministic. + sort.Strings(argKeys) + + // Validate keys common for all the steps. + if utils.SlicesContains(argKeys, ShellArgKey) && !utils.SlicesContains(argKeys, CommandArgKey) { + return fmt.Errorf("workflow steps only support %q key in combination with %q key", + ShellArgKey, CommandArgKey) + } + if utils.SlicesContains(argKeys, ShellArgsArgKey) && !utils.SlicesContains(argKeys, ShellArgKey) { + return fmt.Errorf("workflow steps only support %q key in combination with %q key", + ShellArgsArgKey, ShellArgKey) + } + + switch t := argMap[ShellArgsArgKey].(type) { + case nil: + case string: + case []interface{}: + for _, e := range t { + if _, ok := e.(string); !ok { + return fmt.Errorf("%q step %q option must contain only strings, found %v\n", + stepName, ShellArgsArgKey, e) + } } - // Sort so tests can be deterministic. 
- sort.Strings(argKeys) + default: + return fmt.Errorf("%q step %q option must be a string or a list of strings, found %v\n", + stepName, ShellArgsArgKey, t) + } + delete(argMap, ShellArgsArgKey) + delete(argMap, ShellArgKey) + // Validate keys per step type. + switch stepName { + case EnvStepName: foundNameKey := false for _, k := range argKeys { - if k != NameArgKey && k != CommandArgKey && k != ValueArgKey { - return fmt.Errorf("env steps only support keys %q, %q and %q, found key %q", NameArgKey, ValueArgKey, CommandArgKey, k) + if k != NameArgKey && k != CommandArgKey && k != ValueArgKey && k != ShellArgKey && k != ShellArgsArgKey { + return fmt.Errorf("env steps only support keys %q, %q, %q, %q and %q, found key %q", + NameArgKey, ValueArgKey, CommandArgKey, ShellArgKey, ShellArgsArgKey, k) } if k == NameArgKey { foundNameKey = true } } + delete(argMap, CommandArgKey) if !foundNameKey { return fmt.Errorf("env steps must have a %q key set", NameArgKey) } - // If we have 3 keys at this point then they've set both command and value. - if len(argKeys) != 2 { + delete(argMap, NameArgKey) + if utils.SlicesContains(argKeys, ValueArgKey) && utils.SlicesContains(argKeys, CommandArgKey) { return fmt.Errorf("env steps only support one of the %q or %q keys, found both", ValueArgKey, CommandArgKey) } - case RunStepName: - argsCopy := make(map[string]string) - for k, v := range args { - argsCopy[k] = v - } - args = argsCopy - if _, ok := args[CommandArgKey]; !ok { - return fmt.Errorf("run step must have a %q key set", CommandArgKey) + delete(argMap, ValueArgKey) + case RunStepName, MultiEnvStepName: + if _, ok := argMap[CommandArgKey].(string); !ok { + return fmt.Errorf("%q step must have a %q key set", stepName, CommandArgKey) } - delete(args, CommandArgKey) - if v, ok := args[OutputArgKey]; ok { - if !(v == valid.PostProcessRunOutputShow || v == valid.PostProcessRunOutputHide || v == valid.PostProcessRunOutputStripRefreshing) { - return fmt.Errorf("run step %q option must be one of %q, %q, or %q", OutputArgKey, valid.PostProcessRunOutputShow, valid.PostProcessRunOutputHide, valid.PostProcessRunOutputStripRefreshing) + delete(argMap, CommandArgKey) + if v, ok := argMap[OutputArgKey].(string); ok { + if stepName == RunStepName && !(v == valid.PostProcessRunOutputShow || + v == valid.PostProcessRunOutputHide || v == valid.PostProcessRunOutputStripRefreshing) { + return fmt.Errorf("run step %q option must be one of %q, %q, or %q", + OutputArgKey, valid.PostProcessRunOutputShow, valid.PostProcessRunOutputHide, + valid.PostProcessRunOutputStripRefreshing) + } else if stepName == MultiEnvStepName && !(v == valid.PostProcessRunOutputShow || + v == valid.PostProcessRunOutputHide) { + return fmt.Errorf("multienv step %q option must be %q or %q", + OutputArgKey, valid.PostProcessRunOutputShow, valid.PostProcessRunOutputHide) } } - delete(args, OutputArgKey) - if len(args) > 0 { - var argKeys []string - for k := range args { - argKeys = append(argKeys, k) - } - // Sort so tests can be deterministic. - sort.Strings(argKeys) - return fmt.Errorf("run steps only support keys %q, %q and %q, found extra keys %q", RunStepName, CommandArgKey, OutputArgKey, strings.Join(argKeys, ",")) - } + delete(argMap, OutputArgKey) default: return fmt.Errorf("%q is not a valid step type", stepName) } + if len(argMap) > 0 { + var argKeys []string + for k := range argMap { + argKeys = append(argKeys, k) + } + // Sort so tests can be deterministic. 
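The type switch above is the whole contract for shellArgs: a YAML scalar arrives as a Go string, a YAML sequence as []interface{} whose elements must all be strings. A minimal standalone sketch of the same check (hypothetical helper name, not part of this change):

package main

import "fmt"

// validateShellArgs mirrors the shellArgs type switch in Step.Validate.
func validateShellArgs(v interface{}) error {
	switch t := v.(type) {
	case nil:
	case string:
	case []interface{}:
		for _, e := range t {
			if _, ok := e.(string); !ok {
				return fmt.Errorf("shellArgs must contain only strings, found %v", e)
			}
		}
	default:
		return fmt.Errorf("shellArgs must be a string or a list of strings, found %v", t)
	}
	return nil
}

func main() {
	fmt.Println(validateShellArgs("-c"))                           // <nil>
	fmt.Println(validateShellArgs([]interface{}{"-c", "--debug"})) // <nil>
	fmt.Println(validateShellArgs([]interface{}{"-c", 42}))        // error
}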
+ sort.Strings(argKeys) + return fmt.Errorf("%q steps only support keys %q, %q, %q and %q, found extra keys %q", + stepName, CommandArgKey, OutputArgKey, ShellArgKey, ShellArgsArgKey, strings.Join(argKeys, ",")) + } + return nil } - runStep := func(value interface{}) error { + runOrMultiEnvStep := func(value interface{}) error { elem := value.(map[string]string) var keys []string for k := range elem { @@ -238,7 +295,7 @@ func (s Step) Validate() error { len(keys), strings.Join(keys, ",")) } for stepName := range elem { - if stepName != RunStepName && stepName != MultiEnvStepName { + if !(stepName == RunStepName || stepName == MultiEnvStepName) { return fmt.Errorf("%q is not a valid step type", stepName) } } @@ -251,11 +308,11 @@ func (s Step) Validate() error { if len(s.Map) > 0 { return validation.Validate(s.Map, validation.By(extraArgs)) } - if len(s.EnvOrRun) > 0 { - return validation.Validate(s.EnvOrRun, validation.By(envOrRunStep)) + if len(s.CommandMap) > 0 { + return validation.Validate(s.CommandMap, validation.By(envOrRunOrMultiEnvStep)) } if len(s.StringVal) > 0 { - return validation.Validate(s.StringVal, validation.By(runStep)) + return validation.Validate(s.StringVal, validation.By(runOrMultiEnvStep)) } return errors.New("step element is empty") } @@ -269,20 +326,44 @@ func (s Step) ToValid() valid.Step { } // This will trigger in case #2 (see Step docs). - if len(s.EnvOrRun) > 0 { + if len(s.CommandMap) > 0 { // After validation we assume there's only one key and it's a valid // step name so we just use the first one. - for stepName, stepArgs := range s.EnvOrRun { - step := valid.Step{ - StepName: stepName, - EnvVarName: stepArgs[NameArgKey], - RunCommand: stepArgs[CommandArgKey], - EnvVarValue: stepArgs[ValueArgKey], - Output: valid.PostProcessRunOutputOption(stepArgs[OutputArgKey]), + for stepName, stepArgs := range s.CommandMap { + step := valid.Step{StepName: stepName} + if name, ok := stepArgs[NameArgKey].(string); ok { + step.EnvVarName = name + } + if command, ok := stepArgs[CommandArgKey].(string); ok { + step.RunCommand = command + } + if value, ok := stepArgs[ValueArgKey].(string); ok { + step.EnvVarValue = value + } + if output, ok := stepArgs[OutputArgKey].(string); ok { + step.Output = valid.PostProcessRunOutputOption(output) + } + if shell, ok := stepArgs[ShellArgKey].(string); ok { + step.RunShell = &valid.CommandShell{ + Shell: shell, + ShellArgs: []string{"-c"}, + } } if step.StepName == RunStepName && step.Output == "" { step.Output = valid.PostProcessRunOutputShow } + + switch t := stepArgs[ShellArgsArgKey].(type) { + case nil: + case string: + step.RunShell.ShellArgs = strings.Split(t, " ") + case []interface{}: + step.RunShell.ShellArgs = []string{} + for _, e := range t { + step.RunShell.ShellArgs = append(step.RunShell.ShellArgs, e.(string)) + } + } + return step } } @@ -336,6 +417,17 @@ func (s *Step) unmarshalGeneric(unmarshal func(interface{}) error) error { return nil } + // Try to unmarshal as a custom run step, ex. + // steps: + // - run: my command + // We validate if the key is run later. 
+ var runStep map[string]string + err = unmarshal(&runStep) + if err == nil { + s.StringVal = runStep + return nil + } + // This represents a step with extra_args, ex: // init: // extra_args: [a, b] @@ -348,26 +440,20 @@ func (s *Step) unmarshalGeneric(unmarshal func(interface{}) error) error { return nil } - // This represents an env step, ex: - // env: - // name: k - // value: hi //optional - // command: exec - var envStep map[string]map[string]string - err = unmarshal(&envStep) - if err == nil { - s.EnvOrRun = envStep - return nil - } - - // Try to unmarshal as a custom run step, ex. + // This represents the command steps env, run, and multienv, ex: // steps: - // - run: my command - // We validate if the key is run later. - var runStep map[string]string - err = unmarshal(&runStep) + // - env: + // name: k + // command: exec + // - run: + // name: test_bash_command + // command: echo ${test_value::7} + // shell: bash + // shellArgs: ["--verbose", "-c"] + var commandStep map[string]map[string]interface{} + err = unmarshal(&commandStep) if err == nil { - s.StringVal = runStep + s.CommandMap = commandStep return nil } @@ -379,8 +465,8 @@ func (s Step) marshalGeneric() (interface{}, error) { return s.StringVal, nil } else if len(s.Map) != 0 { return s.Map, nil - } else if len(s.EnvOrRun) != 0 { - return s.EnvOrRun, nil + } else if len(s.CommandMap) != 0 { + return s.CommandMap, nil } else if s.Key != nil { return s.Key, nil } diff --git a/server/core/config/raw/step_test.go b/server/core/config/raw/step_test.go index 72003e2c01..f8b9ae8b11 100644 --- a/server/core/config/raw/step_test.go +++ b/server/core/config/raw/step_test.go @@ -81,7 +81,7 @@ env: value: direct_value name: test`, exp: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "env": { "value": "direct_value", "name": "test", @@ -96,7 +96,7 @@ env: command: echo 123 name: test`, exp: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "env": { "command": "echo 123", "name": "test", @@ -134,21 +134,21 @@ key: value`, description: "empty", input: "", exp: raw.Step{ - Key: nil, - Map: nil, - StringVal: nil, - EnvOrRun: nil, + Key: nil, + Map: nil, + StringVal: nil, + CommandMap: nil, }, }, // Errors { - description: "extra args style no slice strings", + description: "extra args style no map strings", input: ` key: - value: - another: map`, - expErr: "yaml: unmarshal errors:\n line 3: cannot unmarshal !!map into string", + - value: + another: map`, + expErr: "yaml: unmarshal errors:\n line 3: cannot unmarshal !!seq into map[string]interface {}", }, } @@ -227,7 +227,7 @@ func TestStep_Validate(t *testing.T) { { description: "env", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "env": { "name": "test", "command": "echo 123", @@ -236,6 +236,47 @@ }, }, expErr: "", }, + { + description: "env shell", + input: raw.Step{ + CommandMap: EnvType{ + "env": { + "name": "test", + "command": "echo 123", + "shell": "bash", + }, + }, + }, + expErr: "", + }, + { + description: "env shellArgs string", + input: raw.Step{ + CommandMap: EnvType{ + "env": { + "name": "test", + "command": "echo 123", + "shell": "bash", + "shellArgs": "-c", + }, + }, + }, + expErr: "", + }, + { + description: "env shellArgs list of strings", + input: raw.Step{ + CommandMap: EnvType{ + "env": { + "name": "test", + "command": "echo 123", + "shell": "bash", + "shellArgs": []interface{}{"-c", "--debug"}, + }, + }, + }, + expErr: "", + }, { description: "apply extra_args", input: raw.Step{ @@ -283,7 +324,7 @@
func TestStep_Validate(t *testing.T) { { description: "multiple keys in env", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "key1": nil, "key2": nil, }, @@ -312,7 +353,7 @@ func TestStep_Validate(t *testing.T) { { description: "invalid key in env", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "invalid": nil, }, }, @@ -353,7 +394,7 @@ func TestStep_Validate(t *testing.T) { { description: "env step with no name key set", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "env": { "value": "value", }, @@ -364,19 +405,19 @@ func TestStep_Validate(t *testing.T) { { description: "env step with invalid key", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "env": { "abc": "", "invalid2": "", }, }, }, - expErr: "env steps only support keys \"name\", \"value\" and \"command\", found key \"abc\"", + expErr: "env steps only support keys \"name\", \"value\", \"command\", \"shell\" and \"shellArgs\", found key \"abc\"", }, { description: "env step with both command and value set", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "env": { "name": "name", "command": "command", @@ -386,6 +427,58 @@ func TestStep_Validate(t *testing.T) { }, expErr: "env steps only support one of the \"value\" or \"command\" keys, found both", }, + { + description: "env step with shell set but not command", + input: raw.Step{ + CommandMap: EnvType{ + "env": { + "name": "name", + "shell": "bash", + }, + }, + }, + expErr: "workflow steps only support \"shell\" key in combination with \"command\" key", + }, + { + description: "env step with shellArgs set but not shell", + input: raw.Step{ + CommandMap: EnvType{ + "env": { + "name": "name", + "shellArgs": "-c", + }, + }, + }, + expErr: "workflow steps only support \"shellArgs\" key in combination with \"shell\" key", + }, + { + description: "run step with shellArgs is not list of strings", + input: raw.Step{ + CommandMap: EnvType{ + "run": { + "name": "name", + "command": "echo", + "shell": "shell", + "shellArgs": []int{42, 42}, + }, + }, + }, + expErr: "\"run\" step \"shellArgs\" option must be a string or a list of strings, found [42 42]\n", + }, + { + description: "run step with shellArgs contain not strings", + input: raw.Step{ + CommandMap: EnvType{ + "run": { + "name": "name", + "command": "echo", + "shell": "shell", + "shellArgs": []interface{}{"-c", 42}, + }, + }, + }, + expErr: "\"run\" step \"shellArgs\" option must contain only strings, found 42\n", + }, { // For atlantis.yaml v2, this wouldn't parse, but now there should // be no error. 
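As a usage sketch of the new validation (assuming the raw package import path used by the test file above), a run step with a shell override passes, while shellArgs without shell is rejected by the combination check:

step := raw.Step{
	CommandMap: map[string]map[string]interface{}{
		"run": {
			"command":   "terraform fmt -check",
			"shell":     "bash",
			"shellArgs": []interface{}{"--verbose", "-c"},
		},
	},
}
err := step.Validate() // nil: all keys and value types satisfy the rules above

bad := raw.Step{
	CommandMap: map[string]map[string]interface{}{
		"run": {"command": "echo hi", "shellArgs": "-c"},
	},
}
// Returns: workflow steps only support "shellArgs" key in combination with "shell" key
err = bad.Validate()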
@@ -454,7 +547,7 @@ func TestStep_ToValid(t *testing.T) { { description: "env step", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "env": { "name": "test", "command": "echo 123", @@ -561,7 +654,7 @@ func TestStep_ToValid(t *testing.T) { { description: "run step with output", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: RunType{ "run": { "command": "my 'run command'", "output": "hide", @@ -574,6 +667,34 @@ func TestStep_ToValid(t *testing.T) { Output: "hide", }, }, + { + description: "multienv step", + input: raw.Step{ + StringVal: map[string]string{ + "multienv": "envs.sh", + }, + }, + exp: valid.Step{ + StepName: "multienv", + RunCommand: "envs.sh", + }, + }, + { + description: "multienv step with output", + input: raw.Step{ + CommandMap: MultiEnvType{ + "multienv": { + "command": "envs.sh", + "output": "hide", + }, + }, + }, + exp: valid.Step{ + StepName: "multienv", + RunCommand: "envs.sh", + Output: "hide", + }, + }, } for _, c := range cases { t.Run(c.description, func(t *testing.T) { @@ -583,4 +704,6 @@ func TestStep_ToValid(t *testing.T) { } type MapType map[string]map[string][]string -type EnvOrRunType map[string]map[string]string +type EnvType map[string]map[string]interface{} +type RunType map[string]map[string]interface{} +type MultiEnvType map[string]map[string]interface{} diff --git a/server/core/config/raw/team_authz.go b/server/core/config/raw/team_authz.go new file mode 100644 index 0000000000..6149afa1a9 --- /dev/null +++ b/server/core/config/raw/team_authz.go @@ -0,0 +1,19 @@ +package raw + +import "github.com/runatlantis/atlantis/server/core/config/valid" + +type TeamAuthz struct { + Command string `yaml:"command" json:"command"` + Args []string `yaml:"args" json:"args"` +} + +func (t *TeamAuthz) ToValid() valid.TeamAuthz { + var v valid.TeamAuthz + v.Command = t.Command + v.Args = make([]string, 0) + if t.Args != nil { + v.Args = append(v.Args, t.Args...) + } + + return v +} diff --git a/server/core/config/valid/global_cfg.go b/server/core/config/valid/global_cfg.go index a2c84669e4..a930ef22bc 100644 --- a/server/core/config/valid/global_cfg.go +++ b/server/core/config/valid/global_cfg.go @@ -23,9 +23,13 @@ const AllowCustomWorkflowsKey = "allow_custom_workflows" const DefaultWorkflowName = "default" const DeleteSourceBranchOnMergeKey = "delete_source_branch_on_merge" const RepoLockingKey = "repo_locking" +const RepoLocksKey = "repo_locks" const PolicyCheckKey = "policy_check" const CustomPolicyCheckKey = "custom_policy_check" const AutoDiscoverKey = "autodiscover" +const SilencePRCommentsKey = "silence_pr_comments" + +var AllowedSilencePRComments = []string{"plan", "apply"} // DefaultAtlantisFile is the default name of the config file for each repo. 
const DefaultAtlantisFile = "atlantis.yaml" @@ -43,6 +47,7 @@ type GlobalCfg struct { Workflows map[string]Workflow PolicySets PolicySets Metrics Metrics + TeamAuthz TeamAuthz } type Metrics struct { @@ -80,9 +85,11 @@ type Repo struct { AllowCustomWorkflows *bool DeleteSourceBranchOnMerge *bool RepoLocking *bool + RepoLocks *RepoLocks PolicyCheck *bool CustomPolicyCheck *bool AutoDiscover *AutoDiscover + SilencePRComments []string } type MergedProjectCfg struct { @@ -97,14 +104,16 @@ type MergedProjectCfg struct { Name string AutoplanEnabled bool AutoMergeDisabled bool + AutoMergeMethod string TerraformVersion *version.Version RepoCfgVersion int PolicySets PolicySets DeleteSourceBranchOnMerge bool ExecutionOrderGroup int - RepoLocking bool + RepoLocks RepoLocks PolicyCheck bool CustomPolicyCheck bool + SilencePRComments []string } // WorkflowHook is a map of custom run commands to run before or after workflows. @@ -207,11 +216,12 @@ func NewGlobalCfgFromArgs(args GlobalCfgArgs) GlobalCfg { allowCustomWorkflows := false deleteSourceBranchOnMerge := false - repoLockingKey := true + repoLocks := DefaultRepoLocks customPolicyCheck := false autoDiscover := AutoDiscover{Mode: AutoDiscoverAutoMode} + var silencePRComments []string if args.AllowAllRepoSettings { - allowedOverrides = []string{PlanRequirementsKey, ApplyRequirementsKey, ImportRequirementsKey, WorkflowKey, DeleteSourceBranchOnMergeKey, RepoLockingKey, PolicyCheckKey} + allowedOverrides = []string{PlanRequirementsKey, ApplyRequirementsKey, ImportRequirementsKey, WorkflowKey, DeleteSourceBranchOnMergeKey, RepoLockingKey, RepoLocksKey, PolicyCheckKey, SilencePRCommentsKey} allowCustomWorkflows = true } @@ -231,15 +241,19 @@ func NewGlobalCfgFromArgs(args GlobalCfgArgs) GlobalCfg { AllowedOverrides: allowedOverrides, AllowCustomWorkflows: &allowCustomWorkflows, DeleteSourceBranchOnMerge: &deleteSourceBranchOnMerge, - RepoLocking: &repoLockingKey, + RepoLocks: &repoLocks, PolicyCheck: &policyCheck, CustomPolicyCheck: &customPolicyCheck, AutoDiscover: &autoDiscover, + SilencePRComments: silencePRComments, }, }, Workflows: map[string]Workflow{ DefaultWorkflowName: defaultWorkflow, }, + TeamAuthz: TeamAuthz{ + Args: make([]string, 0), + }, } } @@ -271,8 +285,7 @@ func (r Repo) IDString() string { // final config. It assumes that all configs have been validated. func (g GlobalCfg) MergeProjectCfg(log logging.SimpleLogging, repoID string, proj Project, rCfg RepoCfg) MergedProjectCfg { log.Debug("MergeProjectCfg started") - planReqs, applyReqs, importReqs, workflow, allowedOverrides, allowCustomWorkflows, deleteSourceBranchOnMerge, repoLocking, policyCheck, customPolicyCheck, _ := g.getMatchingCfg(log, repoID) - + planReqs, applyReqs, importReqs, workflow, allowedOverrides, allowCustomWorkflows, deleteSourceBranchOnMerge, repoLocks, policyCheck, customPolicyCheck, _, silencePRComments := g.getMatchingCfg(log, repoID) // If repos are allowed to override certain keys then override them. 
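Before the per-key merge below, it may help to see the compatibility rule between the deprecated repo_locking boolean and the newer repo_locks mode in isolation; a hedged sketch (illustrative helper name, not part of the commit, using the valid package types introduced in this diff):

// applyLegacyRepoLocking folds a legacy *bool repo_locking override into a
// RepoLocks value: false always disables locking, while true only re-enables
// it when it was disabled; an already-granular mode is left untouched.
func applyLegacyRepoLocking(repoLocking *bool, locks valid.RepoLocks) valid.RepoLocks {
	if repoLocking == nil {
		return locks
	}
	if !*repoLocking {
		locks.Mode = valid.RepoLocksDisabledMode
	} else if locks.Mode == valid.RepoLocksDisabledMode {
		locks.Mode = valid.DefaultRepoLocksMode
	}
	return locks
}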
for _, key := range allowedOverrides { switch key { @@ -335,8 +348,26 @@ func (g GlobalCfg) MergeProjectCfg(log logging.SimpleLogging, repoID string, pro case RepoLockingKey: if proj.RepoLocking != nil { log.Debug("overriding server-defined %s with repo settings: [%t]", RepoLockingKey, *proj.RepoLocking) - repoLocking = *proj.RepoLocking + if *proj.RepoLocking && repoLocks.Mode == RepoLocksDisabledMode { + repoLocks.Mode = DefaultRepoLocksMode + } else if !*proj.RepoLocking { + repoLocks.Mode = RepoLocksDisabledMode + } + } + case RepoLocksKey: + // We check whether the server-configured value and the repo-root level + // config differ. If they do, we switch to the more granular setting. + if rCfg.RepoLocks != nil && repoLocks.Mode != rCfg.RepoLocks.Mode { + log.Debug("overriding server-defined %s with repo settings: [%#v]", RepoLocksKey, rCfg.RepoLocks) + repoLocks = *rCfg.RepoLocks + } + // Then we check whether the more granular project-based config + // differs. If it does, we use it. + if proj.RepoLocks != nil && repoLocks.Mode != proj.RepoLocks.Mode { + log.Debug("overriding repo-root-defined %s with repo settings: [%#v]", RepoLocksKey, *proj.RepoLocks) + repoLocks = *proj.RepoLocks } + log.Debug("merged repoLocks: [%#v]", repoLocks) case PolicyCheckKey: if proj.PolicyCheck != nil { log.Debug("overriding server-defined %s with repo settings: [%t]", PolicyCheckKey, *proj.PolicyCheck) @@ -347,12 +378,29 @@ func (g GlobalCfg) MergeProjectCfg(log logging.SimpleLogging, repoID string, pro log.Debug("overriding server-defined %s with repo settings: [%t]", CustomPolicyCheckKey, *proj.CustomPolicyCheck) customPolicyCheck = *proj.CustomPolicyCheck } + case SilencePRCommentsKey: + if proj.SilencePRComments != nil { + log.Debug("overriding repo-root-defined %s with repo settings: [%s]", SilencePRCommentsKey, strings.Join(proj.SilencePRComments, ",")) + silencePRComments = proj.SilencePRComments + } else if rCfg.SilencePRComments != nil { + log.Debug("overriding server-defined %s with repo settings: [%s]", SilencePRCommentsKey, strings.Join(rCfg.SilencePRComments, ",")) + silencePRComments = rCfg.SilencePRComments + } } log.Debug("MergeProjectCfg completed") } - log.Debug("final settings: %s: [%s], %s: [%s], %s: [%s], %s: %s", - PlanRequirementsKey, strings.Join(planReqs, ","), ApplyRequirementsKey, strings.Join(applyReqs, ","), ImportRequirementsKey, strings.Join(importReqs, ","), WorkflowKey, workflow.Name) + log.Debug("final settings: %s: [%s], %s: [%s], %s: [%s], %s: %s, %s: %t, %s: %s, %s: %t, %s: %t, %s: [%s]", + PlanRequirementsKey, strings.Join(planReqs, ","), + ApplyRequirementsKey, strings.Join(applyReqs, ","), + ImportRequirementsKey, strings.Join(importReqs, ","), + WorkflowKey, workflow.Name, + DeleteSourceBranchOnMergeKey, deleteSourceBranchOnMerge, + RepoLocksKey, repoLocks.Mode, + PolicyCheckKey, policyCheck, + CustomPolicyCheckKey, customPolicyCheck, + SilencePRCommentsKey, strings.Join(silencePRComments, ","), + ) return MergedProjectCfg{ PlanRequirements: planReqs, @@ -369,9 +417,10 @@ func (g GlobalCfg) MergeProjectCfg(log logging.SimpleLogging, repoID string, pro PolicySets: g.PolicySets, DeleteSourceBranchOnMerge: deleteSourceBranchOnMerge, ExecutionOrderGroup: proj.ExecutionOrderGroup, - RepoLocking: repoLocking, + RepoLocks: repoLocks, PolicyCheck: policyCheck, CustomPolicyCheck: customPolicyCheck, + SilencePRComments: silencePRComments, } } @@ -379,7 +428,7 @@ func (g GlobalCfg) MergeProjectCfg(log logging.SimpleLogging, repoID string, pro // repo with id repoID.
It is used when there is no repo config. func (g GlobalCfg) DefaultProjCfg(log logging.SimpleLogging, repoID string, repoRelDir string, workspace string) MergedProjectCfg { log.Debug("building config based on server-side config") - planReqs, applyReqs, importReqs, workflow, _, _, deleteSourceBranchOnMerge, repoLocking, policyCheck, customPolicyCheck, _ := g.getMatchingCfg(log, repoID) + planReqs, applyReqs, importReqs, workflow, _, _, deleteSourceBranchOnMerge, repoLocks, policyCheck, customPolicyCheck, _, silencePRComments := g.getMatchingCfg(log, repoID) return MergedProjectCfg{ PlanRequirements: planReqs, ApplyRequirements: applyReqs, @@ -392,9 +441,10 @@ func (g GlobalCfg) DefaultProjCfg(log logging.SimpleLogging, repoID string, repo TerraformVersion: nil, PolicySets: g.PolicySets, DeleteSourceBranchOnMerge: deleteSourceBranchOnMerge, - RepoLocking: repoLocking, + RepoLocks: repoLocks, PolicyCheck: policyCheck, CustomPolicyCheck: customPolicyCheck, + SilencePRComments: silencePRComments, } } @@ -412,7 +462,6 @@ func (g GlobalCfg) RepoAutoDiscoverCfg(repoID string) *AutoDiscover { // ValidateRepoCfg validates that rCfg for repo with id repoID is valid based // on our global config. func (g GlobalCfg) ValidateRepoCfg(rCfg RepoCfg, repoID string) error { - mapContainsF := func(m map[string]Workflow, key string) bool { for k := range m { if k == key { @@ -450,9 +499,32 @@ func (g GlobalCfg) ValidateRepoCfg(rCfg RepoCfg, repoID string) error { if p.RepoLocking != nil && !utils.SlicesContains(allowedOverrides, RepoLockingKey) { return fmt.Errorf("repo config not allowed to set '%s' key: server-side config needs '%s: [%s]'", RepoLockingKey, AllowedOverridesKey, RepoLockingKey) } + if p.RepoLocks != nil && !utils.SlicesContains(allowedOverrides, RepoLocksKey) { + return fmt.Errorf("repo config not allowed to set '%s' key: server-side config needs '%s: [%s]'", RepoLocksKey, AllowedOverridesKey, RepoLocksKey) + } if p.CustomPolicyCheck != nil && !utils.SlicesContains(allowedOverrides, CustomPolicyCheckKey) { return fmt.Errorf("repo config not allowed to set '%s' key: server-side config needs '%s: [%s]'", CustomPolicyCheckKey, AllowedOverridesKey, CustomPolicyCheckKey) } + if p.SilencePRComments != nil { + if !utils.SlicesContains(allowedOverrides, SilencePRCommentsKey) { + return fmt.Errorf( + "repo config not allowed to set '%s' key: server-side config needs '%s: [%s]'", + SilencePRCommentsKey, + AllowedOverridesKey, + SilencePRCommentsKey, + ) + } + for _, silenceStage := range p.SilencePRComments { + if !utils.SlicesContains(AllowedSilencePRComments, silenceStage) { + return fmt.Errorf( + "repo config '%s' key value of '%s' is not supported, supported values are [%s]", + SilencePRCommentsKey, + silenceStage, + strings.Join(AllowedSilencePRComments, ", "), + ) + } + } + } } // Check custom workflows. @@ -511,7 +583,7 @@ func (g GlobalCfg) ValidateRepoCfg(rCfg RepoCfg, repoID string) error { } // getMatchingCfg returns the key settings for repoID. 
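The silence_pr_comments check above only admits values from AllowedSilencePRComments; a standalone sketch of the same guard (hypothetical function name, allowed set inlined):

package main

import (
	"fmt"
	"strings"
)

// validateSilencePRComments mirrors the ValidateRepoCfg guard above:
// every configured stage must be in the allowed set ("plan", "apply" today).
func validateSilencePRComments(stages, allowed []string) error {
	for _, stage := range stages {
		supported := false
		for _, a := range allowed {
			if stage == a {
				supported = true
				break
			}
		}
		if !supported {
			return fmt.Errorf("silence_pr_comments value %q is not supported, supported values are [%s]",
				stage, strings.Join(allowed, ", "))
		}
	}
	return nil
}

func main() {
	allowed := []string{"plan", "apply"}
	fmt.Println(validateSilencePRComments([]string{"plan"}, allowed))  // <nil>
	fmt.Println(validateSilencePRComments([]string{"merge"}, allowed)) // error
}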
-func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (planReqs []string, applyReqs []string, importReqs []string, workflow Workflow, allowedOverrides []string, allowCustomWorkflows bool, deleteSourceBranchOnMerge bool, repoLocking bool, policyCheck bool, customPolicyCheck bool, autoDiscover AutoDiscover) { +func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (planReqs []string, applyReqs []string, importReqs []string, workflow Workflow, allowedOverrides []string, allowCustomWorkflows bool, deleteSourceBranchOnMerge bool, repoLocks RepoLocks, policyCheck bool, customPolicyCheck bool, autoDiscover AutoDiscover, silencePRComments []string) { toLog := make(map[string]string) traceF := func(repoIdx int, repoID string, key string, val interface{}) string { from := "default server config" @@ -535,8 +607,10 @@ func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (pla // Can't use raw.DefaultAutoDiscoverMode() because of an import cycle. Should refactor to avoid that. autoDiscover = AutoDiscover{Mode: AutoDiscoverAutoMode} + repoLocking := true + repoLocks = DefaultRepoLocks - for _, key := range []string{PlanRequirementsKey, ApplyRequirementsKey, ImportRequirementsKey, WorkflowKey, AllowedOverridesKey, AllowCustomWorkflowsKey, DeleteSourceBranchOnMergeKey, RepoLockingKey, PolicyCheckKey, CustomPolicyCheckKey} { + for _, key := range []string{PlanRequirementsKey, ApplyRequirementsKey, ImportRequirementsKey, WorkflowKey, AllowedOverridesKey, AllowCustomWorkflowsKey, DeleteSourceBranchOnMergeKey, RepoLockingKey, RepoLocksKey, PolicyCheckKey, CustomPolicyCheckKey, SilencePRCommentsKey} { for i, repo := range g.Repos { if repo.IDMatches(repoID) { switch key { @@ -580,6 +654,11 @@ func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (pla toLog[RepoLockingKey] = traceF(i, repo.IDString(), RepoLockingKey, *repo.RepoLocking) repoLocking = *repo.RepoLocking } + case RepoLocksKey: + if repo.RepoLocks != nil { + toLog[RepoLocksKey] = traceF(i, repo.IDString(), RepoLocksKey, repo.RepoLocks.Mode) + repoLocks = *repo.RepoLocks + } case PolicyCheckKey: if repo.PolicyCheck != nil { toLog[PolicyCheckKey] = traceF(i, repo.IDString(), PolicyCheckKey, *repo.PolicyCheck) @@ -595,6 +674,11 @@ func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (pla toLog[AutoDiscoverKey] = traceF(i, repo.IDString(), AutoDiscoverKey, repo.AutoDiscover.Mode) autoDiscover = *repo.AutoDiscover } + case SilencePRCommentsKey: + if repo.SilencePRComments != nil { + toLog[SilencePRCommentsKey] = traceF(i, repo.IDString(), SilencePRCommentsKey, repo.SilencePRComments) + silencePRComments = repo.SilencePRComments + } } } } @@ -602,6 +686,10 @@ func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (pla for _, l := range toLog { log.Debug(l) } + // repoLocking is deprecated and enabled by default; disable repo locks if it is explicitly disabled + if !repoLocking { + repoLocks.Mode = RepoLocksDisabledMode + } return } diff --git a/server/core/config/valid/global_cfg_test.go b/server/core/config/valid/global_cfg_test.go index 5c9cfc919a..f247723a3e 100644 --- a/server/core/config/valid/global_cfg_test.go +++ b/server/core/config/valid/global_cfg_test.go @@ -80,7 +80,7 @@ func TestNewGlobalCfg(t *testing.T) { AllowedOverrides: []string{}, AllowCustomWorkflows: Bool(false), DeleteSourceBranchOnMerge: Bool(false), - RepoLocking: Bool(true), + RepoLocks: &valid.DefaultRepoLocks, PolicyCheck: Bool(false),
CustomPolicyCheck: Bool(false), AutoDiscover: raw.DefaultAutoDiscover(), @@ -89,6 +89,9 @@ func TestNewGlobalCfg(t *testing.T) { Workflows: map[string]valid.Workflow{ "default": expDefaultWorkflow, }, + TeamAuthz: valid.TeamAuthz{ + Args: make([]string, 0), + }, } cases := []struct { @@ -129,7 +132,7 @@ func TestNewGlobalCfg(t *testing.T) { if c.allowAllRepoSettings { exp.Repos[0].AllowCustomWorkflows = Bool(true) - exp.Repos[0].AllowedOverrides = []string{"plan_requirements", "apply_requirements", "import_requirements", "workflow", "delete_source_branch_on_merge", "repo_locking", "policy_check"} + exp.Repos[0].AllowedOverrides = []string{"plan_requirements", "apply_requirements", "import_requirements", "workflow", "delete_source_branch_on_merge", "repo_locking", "repo_locks", "policy_check", "silence_pr_comments"} } if c.policyCheckEnabled { exp.Repos[0].PlanRequirements = append(exp.Repos[0].PlanRequirements, "policies_passed") @@ -569,7 +572,7 @@ policies: Workspace: "default", Name: "", AutoplanEnabled: false, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -618,7 +621,7 @@ policies: Workspace: "default", Name: "", AutoplanEnabled: false, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -707,7 +710,7 @@ workflows: Name: "", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -737,7 +740,7 @@ repos: Name: "", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -767,7 +770,7 @@ repos: Name: "", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -798,7 +801,7 @@ repos: Name: "", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, PolicyCheck: true, }, @@ -829,7 +832,7 @@ repos: Name: "", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, PolicyCheck: false, }, @@ -860,7 +863,7 @@ repos: Name: "", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -877,7 +880,6 @@ repos: PlanRequirements: []string{}, ApplyRequirements: []string{}, ImportRequirements: []string{}, - RepoLocking: Bool(true), CustomPolicyCheck: Bool(false), }, repoWorkflows: nil, @@ -891,7 +893,39 @@ repos: Name: "", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: false, + RepoLocks: valid.RepoLocks{Mode: valid.RepoLocksDisabledMode}, + CustomPolicyCheck: false, + }, + }, + "repo-side repo_locks win out if allowed": { + gCfg: ` +repos: +- id: /.*/ + repo_locks: + mode: on_apply +`, + repoID: "github.com/owner/repo", + proj: valid.Project{ + Dir: ".", + Workspace: "default", + PlanRequirements: []string{}, + ApplyRequirements: []string{}, + ImportRequirements: []string{}, + RepoLocks: &valid.DefaultRepoLocks, + CustomPolicyCheck: Bool(false), + }, + repoWorkflows: nil, + exp: valid.MergedProjectCfg{ + PlanRequirements: []string{}, + ApplyRequirements: []string{}, + ImportRequirements: []string{}, + Workflow: defaultWorkflow, + RepoRelDir: ".", + Workspace: "default", + Name: "", + AutoplanEnabled: false, + PolicySets: emptyPolicySets, + RepoLocks: valid.RepoLocks{Mode: 
valid.RepoLocksOnApplyMode}, CustomPolicyCheck: false, }, }, @@ -928,7 +962,7 @@ repos: Name: "myname", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -955,7 +989,7 @@ repos: Name: "myname", AutoplanEnabled: true, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -984,7 +1018,7 @@ repos: AutoplanEnabled: true, PolicySets: emptyPolicySets, ExecutionOrderGroup: 10, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -1172,7 +1206,7 @@ repos: Name: "myname", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, PolicyCheck: false, CustomPolicyCheck: false, }, @@ -1212,7 +1246,7 @@ repos: Name: "myname", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, PolicyCheck: true, CustomPolicyCheck: false, }, @@ -1253,7 +1287,7 @@ repos: Name: "myname", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, PolicyCheck: false, CustomPolicyCheck: false, }, @@ -1294,7 +1328,7 @@ repos: Name: "myname", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, PolicyCheck: false, CustomPolicyCheck: false, }, @@ -1335,7 +1369,7 @@ repos: Name: "myname", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, PolicyCheck: true, // Project will have policy check as true but since it is globally disable it wont actually run CustomPolicyCheck: false, }, diff --git a/server/core/config/valid/policies.go b/server/core/config/valid/policies.go index 8fb6cfdc91..6aee54179c 100644 --- a/server/core/config/valid/policies.go +++ b/server/core/config/valid/policies.go @@ -27,11 +27,12 @@ type PolicyOwners struct { } type PolicySet struct { - Source string - Path string - Name string - ApproveCount int - Owners PolicyOwners + Source string + Path string + Name string + ApproveCount int + Owners PolicyOwners + PreventSelfApprove bool } func (p *PolicySets) HasPolicies() bool { diff --git a/server/core/config/valid/repo_cfg.go b/server/core/config/valid/repo_cfg.go index 95e36b1f27..e42e60158b 100644 --- a/server/core/config/valid/repo_cfg.go +++ b/server/core/config/valid/repo_cfg.go @@ -24,11 +24,12 @@ type RepoCfg struct { ParallelPlan *bool ParallelPolicyCheck *bool DeleteSourceBranchOnMerge *bool - RepoLocking *bool + RepoLocks *RepoLocks CustomPolicyCheck *bool EmojiReaction string AllowedRegexpPrefixes []string AbortOnExcecutionOrderFail bool + SilencePRComments []string } func (r RepoCfg) FindProjectsByDirWorkspace(repoRelDir string, workspace string) []Project { @@ -154,9 +155,11 @@ type Project struct { DependsOn []string DeleteSourceBranchOnMerge *bool RepoLocking *bool + RepoLocks *RepoLocks ExecutionOrderGroup int PolicyCheck *bool CustomPolicyCheck *bool + SilencePRComments []string } // GetName returns the name of the project or an empty string if there is no @@ -186,6 +189,16 @@ type Stage struct { Steps []Step } +// CommandShell sets up the shell for command execution +type CommandShell struct { + Shell string + ShellArgs []string +} + +func (s CommandShell) String() string { + return fmt.Sprintf("%s %s", s.Shell, strings.Join(s.ShellArgs, " ")) +} + type Step struct { StepName string ExtraArgs []string @@ -199,6 +212,8 
@@ type Step struct { EnvVarName string // EnvVarValue is the value to set EnvVarName to. EnvVarValue string + // The Shell to use for RunCommand execution. + RunShell *CommandShell } type Workflow struct { diff --git a/server/core/config/valid/repo_locks.go b/server/core/config/valid/repo_locks.go new file mode 100644 index 0000000000..7a4a77a873 --- /dev/null +++ b/server/core/config/valid/repo_locks.go @@ -0,0 +1,19 @@ +package valid + +// RepoLocksMode enum +type RepoLocksMode string + +var DefaultRepoLocksMode = RepoLocksOnPlanMode +var DefaultRepoLocks = RepoLocks{ + Mode: DefaultRepoLocksMode, +} + +const ( + RepoLocksDisabledMode RepoLocksMode = "disabled" + RepoLocksOnPlanMode RepoLocksMode = "on_plan" + RepoLocksOnApplyMode RepoLocksMode = "on_apply" +) + +type RepoLocks struct { + Mode RepoLocksMode +} diff --git a/server/core/config/valid/team_authz.go b/server/core/config/valid/team_authz.go new file mode 100644 index 0000000000..863b3d5c63 --- /dev/null +++ b/server/core/config/valid/team_authz.go @@ -0,0 +1,6 @@ +package valid + +type TeamAuthz struct { + Command string `yaml:"command" json:"command"` + Args []string `yaml:"args" json:"args"` +} diff --git a/server/core/redis/redis.go b/server/core/redis/redis.go index 030cd15b82..2afe336ce4 100644 --- a/server/core/redis/redis.go +++ b/server/core/redis/redis.go @@ -238,7 +238,7 @@ func (r *RedisDB) CheckCommandLock(cmdName command.Name) (*command.Lock, error) return &cmdLock, err } -// UpdatePullWithResults updates pull's status with the latest project results. +// UpdateProjectStatus updates pull's status with the latest project results. // It returns the new PullStatus object. func (r *RedisDB) UpdateProjectStatus(pull models.PullRequest, workspace string, repoRelDir string, newStatus models.ProjectPlanStatus) error { key, err := r.pullKey(pull) diff --git a/server/core/runtime/apply_step_runner.go b/server/core/runtime/apply_step_runner.go index eb1633eea0..2e223f2996 100644 --- a/server/core/runtime/apply_step_runner.go +++ b/server/core/runtime/apply_step_runner.go @@ -12,6 +12,7 @@ import ( version "github.com/hashicorp/go-version" "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/utils" ) // ApplyStepRunner runs `terraform apply`. @@ -56,7 +57,7 @@ func (a *ApplyStepRunner) Run(ctx command.ProjectContext, extraArgs []string, pa // If the apply was successful, delete the plan. if err == nil { ctx.Log.Info("apply successful, deleting planfile") - if removeErr := os.Remove(planPath); removeErr != nil { + if removeErr := utils.RemoveIgnoreNonExistent(planPath); removeErr != nil { ctx.Log.Warn("failed to delete planfile after successful apply: %s", removeErr) } } @@ -116,7 +117,6 @@ func (a *ApplyStepRunner) runRemoteApply( absPlanPath string, tfVersion *version.Version, envs map[string]string) (string, error) { - // The planfile contents are needed to ensure that the plan didn't change // between plan and apply phases. planfileBytes, err := os.ReadFile(absPlanPath) diff --git a/server/core/runtime/env_step_runner.go b/server/core/runtime/env_step_runner.go index eb6556c182..5fa865fefd 100644 --- a/server/core/runtime/env_step_runner.go +++ b/server/core/runtime/env_step_runner.go @@ -15,13 +15,20 @@ type EnvStepRunner struct { // Run runs the env step command. // value is the value for the environment variable. If set this is returned as // the value. Otherwise command is run and its output is the value returned. 
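Since several runners below now accept a *valid.CommandShell, here is a compact, self-contained sketch of how the shell override expands into the final exec argv (mirroring NewShellCommandRunner further down; nil falls back to sh -c):

package main

import "fmt"

// commandShell mirrors valid.CommandShell from this change.
type commandShell struct {
	Shell     string
	ShellArgs []string
}

// argv assembles the arguments the way ShellCommandRunner does:
// shell args first, then the whole user command as one trailing argument.
func argv(s *commandShell, command string) []string {
	if s == nil { // same default the runner applies
		s = &commandShell{Shell: "sh", ShellArgs: []string{"-c"}}
	}
	return append(append([]string{s.Shell}, s.ShellArgs...), command)
}

func main() {
	fmt.Println(argv(nil, "echo hi")) // [sh -c echo hi]
	fmt.Println(argv(&commandShell{Shell: "bash", ShellArgs: []string{"--verbose", "-c"}}, "pwd")) // [bash --verbose -c pwd]
}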
-func (r *EnvStepRunner) Run(ctx command.ProjectContext, command string, value string, path string, envs map[string]string) (string, error) { +func (r *EnvStepRunner) Run( + ctx command.ProjectContext, + shell *valid.CommandShell, + command string, + value string, + path string, + envs map[string]string, +) (string, error) { if value != "" { return value, nil } // Pass `false` for streamOutput because this isn't interesting to the user reading the build logs // in the web UI. - res, err := r.RunStepRunner.Run(ctx, command, path, envs, false, valid.PostProcessRunOutputShow) + res, err := r.RunStepRunner.Run(ctx, shell, command, path, envs, false, valid.PostProcessRunOutputShow) // Trim newline from res to support running `echo env_value` which has // a newline. We don't recommend users run echo -n env_value to remove the // newline because -n doesn't work in the sh shell which is what we use diff --git a/server/core/runtime/env_step_runner_test.go b/server/core/runtime/env_step_runner_test.go index a26b5c1a93..0fe86f77f0 100644 --- a/server/core/runtime/env_step_runner_test.go +++ b/server/core/runtime/env_step_runner_test.go @@ -77,7 +77,7 @@ func TestEnvStepRunner_Run(t *testing.T) { TerraformVersion: tfVersion, ProjectName: c.ProjectName, } - value, err := envRunner.Run(ctx, c.Command, c.Value, tmpDir, map[string]string(nil)) + value, err := envRunner.Run(ctx, nil, c.Command, c.Value, tmpDir, map[string]string(nil)) if c.ExpErr != "" { ErrContains(t, c.ExpErr, err) return diff --git a/server/core/runtime/external_team_allowlist_runner.go b/server/core/runtime/external_team_allowlist_runner.go new file mode 100644 index 0000000000..ec95d1c19b --- /dev/null +++ b/server/core/runtime/external_team_allowlist_runner.go @@ -0,0 +1,58 @@ +package runtime + +import ( + "context" + "fmt" + "os" + "os/exec" + "strings" + + "github.com/runatlantis/atlantis/server/events/models" +) + +//go:generate pegomock generate --package mocks -o mocks/mock_external_team_allowlist_runner.go ExternalTeamAllowlistRunner +type ExternalTeamAllowlistRunner interface { + Run(ctx models.TeamAllowlistCheckerContext, shell, shellArgs, command string) (string, error) +} + +type DefaultExternalTeamAllowlistRunner struct{} + +func (r DefaultExternalTeamAllowlistRunner) Run(ctx models.TeamAllowlistCheckerContext, shell, shellArgs, command string) (string, error) { + shellArgsSlice := append(strings.Split(shellArgs, " "), command) + cmd := exec.CommandContext(context.TODO(), shell, shellArgsSlice...) 
// #nosec + + baseEnvVars := os.Environ() + customEnvVars := map[string]string{ + "BASE_BRANCH_NAME": ctx.Pull.BaseBranch, + "BASE_REPO_NAME": ctx.BaseRepo.Name, + "BASE_REPO_OWNER": ctx.BaseRepo.Owner, + "COMMENT_ARGS": strings.Join(ctx.EscapedCommentArgs, ","), + "HEAD_BRANCH_NAME": ctx.Pull.HeadBranch, + "HEAD_COMMIT": ctx.Pull.HeadCommit, + "HEAD_REPO_NAME": ctx.HeadRepo.Name, + "HEAD_REPO_OWNER": ctx.HeadRepo.Owner, + "PULL_AUTHOR": ctx.Pull.Author, + "PULL_NUM": fmt.Sprintf("%d", ctx.Pull.Num), + "PULL_URL": ctx.Pull.URL, + "USER_NAME": ctx.User.Username, + "COMMAND_NAME": ctx.CommandName, + "PROJECT_NAME": ctx.ProjectName, + "REPO_ROOT": ctx.RepoDir, + "REPO_REL_PATH": ctx.RepoRelDir, + } + + finalEnvVars := baseEnvVars + for key, val := range customEnvVars { + finalEnvVars = append(finalEnvVars, fmt.Sprintf("%s=%s", key, val)) + } + cmd.Env = finalEnvVars + out, err := cmd.CombinedOutput() + + if err != nil { + err = fmt.Errorf("%s: running %q: \n%s", err, shell+" "+shellArgs+" "+command, out) + ctx.Log.Debug("error: %s", err) + return string(out), err + } + + return strings.TrimSpace(string(out)), nil +} diff --git a/server/core/runtime/import_step_runner.go b/server/core/runtime/import_step_runner.go index 2f4cb8c51c..0d5787a8ad 100644 --- a/server/core/runtime/import_step_runner.go +++ b/server/core/runtime/import_step_runner.go @@ -6,6 +6,7 @@ import ( version "github.com/hashicorp/go-version" "github.com/runatlantis/atlantis/server/events/command" + "github.com/runatlantis/atlantis/server/utils" ) type importStepRunner struct { @@ -37,7 +38,7 @@ func (p *importStepRunner) Run(ctx command.ProjectContext, extraArgs []string, p if err == nil { if _, planPathErr := os.Stat(planPath); !os.IsNotExist(planPathErr) { ctx.Log.Info("import successful, deleting planfile") - if removeErr := os.Remove(planPath); removeErr != nil { + if removeErr := utils.RemoveIgnoreNonExistent(planPath); removeErr != nil { ctx.Log.Warn("failed to delete planfile after successful import: %s", removeErr) } } diff --git a/server/core/runtime/init_step_runner.go b/server/core/runtime/init_step_runner.go index cd3ab32810..0c6de1b013 100644 --- a/server/core/runtime/init_step_runner.go +++ b/server/core/runtime/init_step_runner.go @@ -1,12 +1,12 @@ package runtime import ( - "os" "path/filepath" version "github.com/hashicorp/go-version" "github.com/runatlantis/atlantis/server/core/runtime/common" "github.com/runatlantis/atlantis/server/events/command" + "github.com/runatlantis/atlantis/server/utils" ) // InitStep runs `terraform init`. 
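The hunks above and below swap bare os.Remove calls for utils.RemoveIgnoreNonExistent. Its body is not shown in this diff; based on the name and call sites it can be assumed to look roughly like this (an assumption, not the committed code):

package utils

import (
	"errors"
	"io/fs"
	"os"
)

// RemoveIgnoreNonExistent removes path but treats a missing file as success,
// so callers stop warning when a planfile was already cleaned up.
func RemoveIgnoreNonExistent(path string) error {
	if err := os.Remove(path); err != nil && !errors.Is(err, fs.ErrNotExist) {
		return err
	}
	return nil
}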
@@ -21,14 +21,13 @@ func (i *InitStepRunner) Run(ctx command.ProjectContext, extraArgs []string, pat terraformLockFileTracked, err := common.IsFileTracked(path, lockFileName) if err != nil { ctx.Log.Warn("Error checking if %s is tracked in %s", lockFileName, path) - } // If .terraform.lock.hcl is not tracked in git and it exists prior to init // delete it as it probably has been created by a previous run of // terraform init if common.FileExists(terraformLockfilePath) && !terraformLockFileTracked { ctx.Log.Debug("Deleting `%s` that was generated by previous terraform init", terraformLockfilePath) - delErr := os.Remove(terraformLockfilePath) + delErr := utils.RemoveIgnoreNonExistent(terraformLockfilePath) if delErr != nil { ctx.Log.Info("Error Deleting `%s`", lockFileName) } diff --git a/server/core/runtime/mocks/mock_external_team_allowlist_runner.go b/server/core/runtime/mocks/mock_external_team_allowlist_runner.go new file mode 100644 index 0000000000..3918bbb9d7 --- /dev/null +++ b/server/core/runtime/mocks/mock_external_team_allowlist_runner.go @@ -0,0 +1,129 @@ +// Code generated by pegomock. DO NOT EDIT. +// Source: github.com/runatlantis/atlantis/server/core/runtime (interfaces: ExternalTeamAllowlistRunner) + +package mocks + +import ( + pegomock "github.com/petergtz/pegomock/v4" + models "github.com/runatlantis/atlantis/server/events/models" + "reflect" + "time" +) + +type MockExternalTeamAllowlistRunner struct { + fail func(message string, callerSkip ...int) +} + +func NewMockExternalTeamAllowlistRunner(options ...pegomock.Option) *MockExternalTeamAllowlistRunner { + mock := &MockExternalTeamAllowlistRunner{} + for _, option := range options { + option.Apply(mock) + } + return mock +} + +func (mock *MockExternalTeamAllowlistRunner) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } +func (mock *MockExternalTeamAllowlistRunner) FailHandler() pegomock.FailHandler { return mock.fail } + +func (mock *MockExternalTeamAllowlistRunner) Run(ctx models.TeamAllowlistCheckerContext, shell string, shellArgs string, command string) (string, error) { + if mock == nil { + panic("mock must not be nil. 
Use myMock := NewMockExternalTeamAllowlistRunner().") + } + _params := []pegomock.Param{ctx, shell, shellArgs, command} + _result := pegomock.GetGenericMockFrom(mock).Invoke("Run", _params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var _ret0 string + var _ret1 error + if len(_result) != 0 { + if _result[0] != nil { + _ret0 = _result[0].(string) + } + if _result[1] != nil { + _ret1 = _result[1].(error) + } + } + return _ret0, _ret1 +} + +func (mock *MockExternalTeamAllowlistRunner) VerifyWasCalledOnce() *VerifierMockExternalTeamAllowlistRunner { + return &VerifierMockExternalTeamAllowlistRunner{ + mock: mock, + invocationCountMatcher: pegomock.Times(1), + } +} + +func (mock *MockExternalTeamAllowlistRunner) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockExternalTeamAllowlistRunner { + return &VerifierMockExternalTeamAllowlistRunner{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + } +} + +func (mock *MockExternalTeamAllowlistRunner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockExternalTeamAllowlistRunner { + return &VerifierMockExternalTeamAllowlistRunner{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + inOrderContext: inOrderContext, + } +} + +func (mock *MockExternalTeamAllowlistRunner) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockExternalTeamAllowlistRunner { + return &VerifierMockExternalTeamAllowlistRunner{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + timeout: timeout, + } +} + +type VerifierMockExternalTeamAllowlistRunner struct { + mock *MockExternalTeamAllowlistRunner + invocationCountMatcher pegomock.InvocationCountMatcher + inOrderContext *pegomock.InOrderContext + timeout time.Duration +} + +func (verifier *VerifierMockExternalTeamAllowlistRunner) Run(ctx models.TeamAllowlistCheckerContext, shell string, shellArgs string, command string) *MockExternalTeamAllowlistRunner_Run_OngoingVerification { + _params := []pegomock.Param{ctx, shell, shellArgs, command} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Run", _params, verifier.timeout) + return &MockExternalTeamAllowlistRunner_Run_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockExternalTeamAllowlistRunner_Run_OngoingVerification struct { + mock *MockExternalTeamAllowlistRunner + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockExternalTeamAllowlistRunner_Run_OngoingVerification) GetCapturedArguments() (models.TeamAllowlistCheckerContext, string, string, string) { + ctx, shell, shellArgs, command := c.GetAllCapturedArguments() + return ctx[len(ctx)-1], shell[len(shell)-1], shellArgs[len(shellArgs)-1], command[len(command)-1] +} + +func (c *MockExternalTeamAllowlistRunner_Run_OngoingVerification) GetAllCapturedArguments() (_param0 []models.TeamAllowlistCheckerContext, _param1 []string, _param2 []string, _param3 []string) { + _params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(_params) > 0 { + if len(_params) > 0 { + _param0 = make([]models.TeamAllowlistCheckerContext, len(c.methodInvocations)) + for u, param := range _params[0] { + _param0[u] = param.(models.TeamAllowlistCheckerContext) + } + } + if len(_params) > 1 { + _param1 = 
make([]string, len(c.methodInvocations)) + for u, param := range _params[1] { + _param1[u] = param.(string) + } + } + if len(_params) > 2 { + _param2 = make([]string, len(c.methodInvocations)) + for u, param := range _params[2] { + _param2[u] = param.(string) + } + } + if len(_params) > 3 { + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range _params[3] { + _param3[u] = param.(string) + } + } + } + return +} diff --git a/server/core/runtime/mocks/mock_pull_approved_checker.go b/server/core/runtime/mocks/mock_pull_approved_checker.go index 13e1a3a834..fc43172cee 100644 --- a/server/core/runtime/mocks/mock_pull_approved_checker.go +++ b/server/core/runtime/mocks/mock_pull_approved_checker.go @@ -6,6 +6,7 @@ package mocks import ( pegomock "github.com/petergtz/pegomock/v4" models "github.com/runatlantis/atlantis/server/events/models" + logging "github.com/runatlantis/atlantis/server/logging" "reflect" "time" ) @@ -25,11 +26,11 @@ func NewMockPullApprovedChecker(options ...pegomock.Option) *MockPullApprovedChe func (mock *MockPullApprovedChecker) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockPullApprovedChecker) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockPullApprovedChecker) PullIsApproved(baseRepo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { +func (mock *MockPullApprovedChecker) PullIsApproved(logger logging.SimpleLogging, baseRepo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockPullApprovedChecker().") } - params := []pegomock.Param{baseRepo, pull} + params := []pegomock.Param{logger, baseRepo, pull} result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsApproved", params, []reflect.Type{reflect.TypeOf((*models.ApprovalStatus)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 models.ApprovalStatus var ret1 error @@ -81,8 +82,8 @@ type VerifierMockPullApprovedChecker struct { timeout time.Duration } -func (verifier *VerifierMockPullApprovedChecker) PullIsApproved(baseRepo models.Repo, pull models.PullRequest) *MockPullApprovedChecker_PullIsApproved_OngoingVerification { - params := []pegomock.Param{baseRepo, pull} +func (verifier *VerifierMockPullApprovedChecker) PullIsApproved(logger logging.SimpleLogging, baseRepo models.Repo, pull models.PullRequest) *MockPullApprovedChecker_PullIsApproved_OngoingVerification { + params := []pegomock.Param{logger, baseRepo, pull} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsApproved", params, verifier.timeout) return &MockPullApprovedChecker_PullIsApproved_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -92,21 +93,25 @@ type MockPullApprovedChecker_PullIsApproved_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockPullApprovedChecker_PullIsApproved_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - baseRepo, pull := c.GetAllCapturedArguments() - return baseRepo[len(baseRepo)-1], pull[len(pull)-1] +func (c *MockPullApprovedChecker_PullIsApproved_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest) { + logger, baseRepo, pull := c.GetAllCapturedArguments() + return logger[len(logger)-1], baseRepo[len(baseRepo)-1], pull[len(pull)-1] } -func (c *MockPullApprovedChecker_PullIsApproved_OngoingVerification) 
GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { +func (c *MockPullApprovedChecker_PullIsApproved_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) + } + _param2 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.PullRequest) } } return diff --git a/server/core/runtime/models/shell_command_runner.go b/server/core/runtime/models/shell_command_runner.go index 3dcd56dd8a..7271f6789e 100644 --- a/server/core/runtime/models/shell_command_runner.go +++ b/server/core/runtime/models/shell_command_runner.go @@ -9,6 +9,7 @@ import ( "time" "github.com/pkg/errors" + "github.com/runatlantis/atlantis/server/core/config/valid" "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/terraform/ansi" "github.com/runatlantis/atlantis/server/jobs" @@ -33,10 +34,27 @@ type ShellCommandRunner struct { outputHandler jobs.ProjectCommandOutputHandler streamOutput bool cmd *exec.Cmd + shell *valid.CommandShell } -func NewShellCommandRunner(command string, environ []string, workingDir string, streamOutput bool, outputHandler jobs.ProjectCommandOutputHandler) *ShellCommandRunner { - cmd := exec.Command("sh", "-c", command) // #nosec +func NewShellCommandRunner( + shell *valid.CommandShell, + command string, + environ []string, + workingDir string, + streamOutput bool, + outputHandler jobs.ProjectCommandOutputHandler, +) *ShellCommandRunner { + if shell == nil { + shell = &valid.CommandShell{ + Shell: "sh", + ShellArgs: []string{"-c"}, + } + } + var args []string + args = append(args, shell.ShellArgs...) + args = append(args, command) + cmd := exec.Command(shell.Shell, args...) // #nosec cmd.Env = environ cmd.Dir = workingDir @@ -46,6 +64,7 @@ func NewShellCommandRunner(command string, environ []string, workingDir string, outputHandler: outputHandler, streamOutput: streamOutput, cmd: cmd, + shell: shell, } } @@ -92,10 +111,10 @@ func (s *ShellCommandRunner) RunCommandAsync(ctx command.ProjectContext) (chan<- stderr, _ := s.cmd.StderrPipe() stdin, _ := s.cmd.StdinPipe() - ctx.Log.Debug("starting %q in %q", s.command, s.workingDir) + ctx.Log.Debug("starting '%s %q' in '%s'", s.shell.String(), s.command, s.workingDir) err := s.cmd.Start() if err != nil { - err = errors.Wrapf(err, "running %q in %q", s.command, s.workingDir) + err = errors.Wrapf(err, "running '%s %q' in '%s'", s.shell.String(), s.command, s.workingDir) ctx.Log.Err(err.Error()) outCh <- Line{Err: err} return @@ -154,11 +173,13 @@ func (s *ShellCommandRunner) RunCommandAsync(ctx command.ProjectContext) (chan<- // We're done now. Send an error if there was one. 
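// Note that both the failure and the success paths below log the resolved
// shell via s.shell.String() together with the command, so a custom
// shell/shellArgs override stays visible in the project output logs.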
if err != nil { - err = errors.Wrapf(err, "running %q in %q", s.command, s.workingDir) + err = errors.Wrapf(err, "running '%s %q' in '%s'", + s.shell.String(), s.command, s.workingDir) log.Err(err.Error()) outCh <- Line{Err: err} } else { - log.Info("successfully ran %q in %q", s.command, s.workingDir) + log.Info("successfully ran '%s %q' in '%s'", + s.shell.String(), s.command, s.workingDir) } }() diff --git a/server/core/runtime/models/shell_command_runner_test.go b/server/core/runtime/models/shell_command_runner_test.go index 0555c7144c..e8edc32fb1 100644 --- a/server/core/runtime/models/shell_command_runner_test.go +++ b/server/core/runtime/models/shell_command_runner_test.go @@ -54,7 +54,7 @@ func TestShellCommandRunner_Run(t *testing.T) { expectedOutput := fmt.Sprintf("%s\n", strings.Join(c.ExpLines, "\n")) // Run once with streaming enabled - runner := models.NewShellCommandRunner(c.Command, environ, cwd, true, projectCmdOutputHandler) + runner := models.NewShellCommandRunner(nil, c.Command, environ, cwd, true, projectCmdOutputHandler) output, err := runner.Run(ctx) Ok(t, err) Equals(t, expectedOutput, output) @@ -68,7 +68,7 @@ func TestShellCommandRunner_Run(t *testing.T) { // command output handler should not have received anything projectCmdOutputHandler = mocks.NewMockProjectCommandOutputHandler() - runner = models.NewShellCommandRunner(c.Command, environ, cwd, false, projectCmdOutputHandler) + runner = models.NewShellCommandRunner(nil, c.Command, environ, cwd, false, projectCmdOutputHandler) output, err = runner.Run(ctx) Ok(t, err) Equals(t, expectedOutput, output) diff --git a/server/core/runtime/multienv_step_runner.go b/server/core/runtime/multienv_step_runner.go index 515eb66896..6e4434111f 100644 --- a/server/core/runtime/multienv_step_runner.go +++ b/server/core/runtime/multienv_step_runner.go @@ -16,32 +16,46 @@ type MultiEnvStepRunner struct { // Run runs the multienv step command. 
// The command must return a json string containing the array of name-value pairs that are being added as extra environment variables -func (r *MultiEnvStepRunner) Run(ctx command.ProjectContext, command string, path string, envs map[string]string) (string, error) { - res, err := r.RunStepRunner.Run(ctx, command, path, envs, false, valid.PostProcessRunOutputShow) +func (r *MultiEnvStepRunner) Run( + ctx command.ProjectContext, + shell *valid.CommandShell, + command string, + path string, + envs map[string]string, + postProcessOutput valid.PostProcessRunOutputOption, +) (string, error) { + res, err := r.RunStepRunner.Run(ctx, shell, command, path, envs, false, postProcessOutput) if err != nil { return "", err } + var sb strings.Builder if len(res) == 0 { - return "No dynamic environment variable added", nil - } + sb.WriteString("No dynamic environment variable added") + } else { + sb.WriteString("Dynamic environment variables added:\n") - var sb strings.Builder - sb.WriteString("Dynamic environment variables added:\n") + vars, err := parseMultienvLine(res) + if err != nil { + return "", fmt.Errorf("Invalid environment variable definition: %s (%w)", res, err) + } - vars, err := parseMultienvLine(res) - if err != nil { - return "", fmt.Errorf("Invalid environment variable definition: %s (%w)", res, err) + for i := 0; i < len(vars); i += 2 { + key := vars[i] + envs[key] = vars[i+1] + sb.WriteString(key) + sb.WriteRune('\n') + } } - for i := 0; i < len(vars); i += 2 { - key := vars[i] - envs[key] = vars[i+1] - sb.WriteString(key) - sb.WriteRune('\n') + switch postProcessOutput { + case valid.PostProcessRunOutputHide: + return "", nil + case valid.PostProcessRunOutputShow: + return sb.String(), nil + default: + return sb.String(), nil } - - return sb.String(), nil } func parseMultienvLine(in string) ([]string, error) { diff --git a/server/core/runtime/multienv_step_runner_test.go b/server/core/runtime/multienv_step_runner_test.go index f7d6b1132f..360adce3f5 100644 --- a/server/core/runtime/multienv_step_runner_test.go +++ b/server/core/runtime/multienv_step_runner_test.go @@ -5,6 +5,7 @@ import ( version "github.com/hashicorp/go-version" . "github.com/petergtz/pegomock/v4" + "github.com/runatlantis/atlantis/server/core/config/valid" "github.com/runatlantis/atlantis/server/core/runtime" "github.com/runatlantis/atlantis/server/core/terraform/mocks" "github.com/runatlantis/atlantis/server/events/command" @@ -84,7 +85,7 @@ func TestMultiEnvStepRunner_Run(t *testing.T) { ProjectName: c.ProjectName, } envMap := make(map[string]string) - value, err := multiEnvStepRunner.Run(ctx, c.Command, tmpDir, envMap) + value, err := multiEnvStepRunner.Run(ctx, nil, c.Command, tmpDir, envMap, valid.PostProcessRunOutputShow) if c.ExpErr != "" { ErrContains(t, c.ExpErr, err) return diff --git a/server/core/runtime/plan_step_runner.go b/server/core/runtime/plan_step_runner.go index 64414ae324..b1cb66c1e4 100644 --- a/server/core/runtime/plan_step_runner.go +++ b/server/core/runtime/plan_step_runner.go @@ -294,9 +294,10 @@ locally at this time. // remoteOpsErr110 is the error terraform plan will return if this project is // using Terraform Cloud remote operations in TF 1.1.0 and above +// note: the trailing whitespace is intentional var remoteOpsErr110 = `╷ │ Error: Saving a generated plan is currently not supported -│ +│ │ Terraform Cloud does not support saving the generated execution plan │ locally at this time. 
╵ diff --git a/server/core/runtime/plan_step_runner_test.go b/server/core/runtime/plan_step_runner_test.go index 6ec16ef0ca..f05336637c 100644 --- a/server/core/runtime/plan_step_runner_test.go +++ b/server/core/runtime/plan_step_runner_test.go @@ -357,7 +357,7 @@ locally at this time. tfVersion: "1.1.0", remoteOpsErr: `╷ │ Error: Saving a generated plan is currently not supported -│ +│ │ Terraform Cloud does not support saving the generated execution plan │ locally at this time. ╵ diff --git a/server/core/runtime/policy/conftest_client.go b/server/core/runtime/policy/conftest_client.go index 5218f883d2..dd69bba4cd 100644 --- a/server/core/runtime/policy/conftest_client.go +++ b/server/core/runtime/policy/conftest_client.go @@ -1,6 +1,7 @@ package policy import ( + "context" "fmt" "os" "path/filepath" @@ -10,13 +11,13 @@ import ( "encoding/json" "regexp" + "github.com/hashicorp/go-getter/v2" "github.com/hashicorp/go-multierror" version "github.com/hashicorp/go-version" "github.com/pkg/errors" "github.com/runatlantis/atlantis/server/core/config/valid" "github.com/runatlantis/atlantis/server/core/runtime/cache" runtime_models "github.com/runatlantis/atlantis/server/core/runtime/models" - "github.com/runatlantis/atlantis/server/core/terraform" "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/logging" @@ -103,8 +104,21 @@ func (p *SourceResolverProxy) Resolve(policySet valid.PolicySet) (string, error) } } +//go:generate pegomock generate --package mocks -o mocks/mock_downloader.go Downloader + +type Downloader interface { + GetAny(dst, src string) error +} + +type ConfTestGoGetterVersionDownloader struct{} + +func (c ConfTestGoGetterVersionDownloader) GetAny(dst, src string) error { + _, err := getter.GetAny(context.Background(), dst, src) + return err +} + type ConfTestVersionDownloader struct { - downloader terraform.Downloader + downloader Downloader } func (c ConfTestVersionDownloader) downloadConfTestVersion(v *version.Version, destPath string) (runtime_models.FilePath, error) { @@ -142,7 +156,7 @@ type ConfTestExecutorWorkflow struct { Exec runtime_models.Exec } -func NewConfTestExecutorWorkflow(log logging.SimpleLogging, versionRootDir string, conftestDownloder terraform.Downloader) *ConfTestExecutorWorkflow { +func NewConfTestExecutorWorkflow(log logging.SimpleLogging, versionRootDir string, conftestDownloder Downloader) *ConfTestExecutorWorkflow { downloader := ConfTestVersionDownloader{ downloader: conftestDownloder, } diff --git a/server/core/runtime/policy/conftest_client_test.go b/server/core/runtime/policy/conftest_client_test.go index d8c5f5b9dc..3b2bbd0645 100644 --- a/server/core/runtime/policy/conftest_client_test.go +++ b/server/core/runtime/policy/conftest_client_test.go @@ -12,7 +12,6 @@ import ( "github.com/runatlantis/atlantis/server/core/runtime/cache/mocks" models_mocks "github.com/runatlantis/atlantis/server/core/runtime/models/mocks" conftest_mocks "github.com/runatlantis/atlantis/server/core/runtime/policy/mocks" - terraform_mocks "github.com/runatlantis/atlantis/server/core/terraform/mocks" "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/logging" .
"github.com/runatlantis/atlantis/testing" @@ -27,13 +26,14 @@ func TestConfTestVersionDownloader(t *testing.T) { RegisterMockTestingT(t) - mockDownloader := terraform_mocks.NewMockDownloader() + mockDownloader := conftest_mocks.NewMockDownloader() - subject := ConfTestVersionDownloader{downloader: mockDownloader} + subject := ConfTestVersionDownloader{ + downloader: mockDownloader, + } t.Run("success", func(t *testing.T) { - When(mockDownloader.GetFile(Eq(destPath), Eq(fullURL))).ThenReturn(nil) binPath, err := subject.downloadConfTestVersion(version, destPath) mockDownloader.VerifyWasCalledOnce().GetAny(Eq(destPath), Eq(fullURL)) diff --git a/server/core/runtime/policy/mocks/mock_downloader.go b/server/core/runtime/policy/mocks/mock_downloader.go new file mode 100644 index 0000000000..03a1d73d47 --- /dev/null +++ b/server/core/runtime/policy/mocks/mock_downloader.go @@ -0,0 +1,112 @@ +// Code generated by pegomock. DO NOT EDIT. +// Source: github.com/runatlantis/atlantis/server/core/runtime/policy (interfaces: Downloader) + +package mocks + +import ( + pegomock "github.com/petergtz/pegomock/v4" + "reflect" + "time" +) + +type MockDownloader struct { + fail func(message string, callerSkip ...int) +} + +func NewMockDownloader(options ...pegomock.Option) *MockDownloader { + mock := &MockDownloader{} + for _, option := range options { + option.Apply(mock) + } + return mock +} + +func (mock *MockDownloader) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } +func (mock *MockDownloader) FailHandler() pegomock.FailHandler { return mock.fail } + +func (mock *MockDownloader) GetAny(dst string, src string) error { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockDownloader().") + } + _params := []pegomock.Param{dst, src} + _result := pegomock.GetGenericMockFrom(mock).Invoke("GetAny", _params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) + var _ret0 error + if len(_result) != 0 { + if _result[0] != nil { + _ret0 = _result[0].(error) + } + } + return _ret0 +} + +func (mock *MockDownloader) VerifyWasCalledOnce() *VerifierMockDownloader { + return &VerifierMockDownloader{ + mock: mock, + invocationCountMatcher: pegomock.Times(1), + } +} + +func (mock *MockDownloader) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockDownloader { + return &VerifierMockDownloader{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + } +} + +func (mock *MockDownloader) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockDownloader { + return &VerifierMockDownloader{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + inOrderContext: inOrderContext, + } +} + +func (mock *MockDownloader) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockDownloader { + return &VerifierMockDownloader{ + mock: mock, + invocationCountMatcher: invocationCountMatcher, + timeout: timeout, + } +} + +type VerifierMockDownloader struct { + mock *MockDownloader + invocationCountMatcher pegomock.InvocationCountMatcher + inOrderContext *pegomock.InOrderContext + timeout time.Duration +} + +func (verifier *VerifierMockDownloader) GetAny(dst string, src string) *MockDownloader_GetAny_OngoingVerification { + _params := []pegomock.Param{dst, src} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetAny", _params, verifier.timeout) 
+ return &MockDownloader_GetAny_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockDownloader_GetAny_OngoingVerification struct { + mock *MockDownloader + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockDownloader_GetAny_OngoingVerification) GetCapturedArguments() (string, string) { + dst, src := c.GetAllCapturedArguments() + return dst[len(dst)-1], src[len(src)-1] +} + +func (c *MockDownloader_GetAny_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string) { + _params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(_params) > 0 { + if len(_params) > 0 { + _param0 = make([]string, len(c.methodInvocations)) + for u, param := range _params[0] { + _param0[u] = param.(string) + } + } + if len(_params) > 1 { + _param1 = make([]string, len(c.methodInvocations)) + for u, param := range _params[1] { + _param1[u] = param.(string) + } + } + } + return +} diff --git a/server/core/runtime/pull_approved_checker.go b/server/core/runtime/pull_approved_checker.go index d3d9c39080..f4884c2e98 100644 --- a/server/core/runtime/pull_approved_checker.go +++ b/server/core/runtime/pull_approved_checker.go @@ -2,10 +2,11 @@ package runtime import ( "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" ) //go:generate pegomock generate --package mocks -o mocks/mock_pull_approved_checker.go PullApprovedChecker type PullApprovedChecker interface { - PullIsApproved(baseRepo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) + PullIsApproved(logger logging.SimpleLogging, baseRepo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) } diff --git a/server/core/runtime/run_step_runner.go b/server/core/runtime/run_step_runner.go index 70d084506a..76629ba460 100644 --- a/server/core/runtime/run_step_runner.go +++ b/server/core/runtime/run_step_runner.go @@ -22,7 +22,15 @@ type RunStepRunner struct { ProjectCmdOutputHandler jobs.ProjectCommandOutputHandler } -func (r *RunStepRunner) Run(ctx command.ProjectContext, command string, path string, envs map[string]string, streamOutput bool, postProcessOutput valid.PostProcessRunOutputOption) (string, error) { +func (r *RunStepRunner) Run( + ctx command.ProjectContext, + shell *valid.CommandShell, + command string, + path string, + envs map[string]string, + streamOutput bool, + postProcessOutput valid.PostProcessRunOutputOption, +) (string, error) { tfVersion := r.DefaultTFVersion if ctx.TerraformVersion != nil { tfVersion = ctx.TerraformVersion @@ -68,9 +76,14 @@ func (r *RunStepRunner) Run(ctx command.ProjectContext, command string, path str finalEnvVars = append(finalEnvVars, fmt.Sprintf("%s=%s", key, val)) } - runner := models.NewShellCommandRunner(command, finalEnvVars, path, streamOutput, r.ProjectCmdOutputHandler) + runner := models.NewShellCommandRunner(shell, command, finalEnvVars, path, streamOutput, r.ProjectCmdOutputHandler) output, err := runner.Run(ctx) + if postProcessOutput == valid.PostProcessRunOutputStripRefreshing { + output = StripRefreshingFromPlanOutput(output, tfVersion) + + } + if err != nil { err = fmt.Errorf("%s: running %q in %q: \n%s", err, command, path, output) if !ctx.CustomPolicyCheck { @@ -84,7 +97,7 @@ func (r *RunStepRunner) Run(ctx command.ProjectContext, command string, path str case valid.PostProcessRunOutputHide: return "", nil case valid.PostProcessRunOutputStripRefreshing: - return StripRefreshingFromPlanOutput(output, 
tfVersion), nil + return output, nil case valid.PostProcessRunOutputShow: return output, nil default: diff --git a/server/core/runtime/run_step_runner_test.go b/server/core/runtime/run_step_runner_test.go index d011254a09..4672fa2bb0 100644 --- a/server/core/runtime/run_step_runner_test.go +++ b/server/core/runtime/run_step_runner_test.go @@ -145,7 +145,7 @@ func TestRunStepRunner_Run(t *testing.T) { ProjectName: c.ProjectName, EscapedCommentArgs: []string{"-target=resource1", "-target=resource2"}, } - out, err := r.Run(ctx, c.Command, tmpDir, map[string]string{"test": "var"}, true, valid.PostProcessRunOutputShow) + out, err := r.Run(ctx, nil, c.Command, tmpDir, map[string]string{"test": "var"}, true, valid.PostProcessRunOutputShow) if c.ExpErr != "" { ErrContains(t, c.ExpErr, err) return diff --git a/server/core/runtime/show_step_runner.go b/server/core/runtime/show_step_runner.go index e7c0f7ff14..ba89479b56 100644 --- a/server/core/runtime/show_step_runner.go +++ b/server/core/runtime/show_step_runner.go @@ -49,7 +49,7 @@ func (p *showStepRunner) Run(ctx command.ProjectContext, _ []string, path string return "", errors.Wrap(err, "running terraform show") } - if err := os.WriteFile(showResultFile, []byte(output), os.ModePerm); err != nil { + if err := os.WriteFile(showResultFile, []byte(output), 0600); err != nil { return "", errors.Wrap(err, "writing terraform show result") } diff --git a/server/core/runtime/state_rm_step_runner.go b/server/core/runtime/state_rm_step_runner.go index 74a0d18875..3b4a08f102 100644 --- a/server/core/runtime/state_rm_step_runner.go +++ b/server/core/runtime/state_rm_step_runner.go @@ -6,6 +6,7 @@ import ( version "github.com/hashicorp/go-version" "github.com/runatlantis/atlantis/server/events/command" + "github.com/runatlantis/atlantis/server/utils" ) type stateRmStepRunner struct { @@ -37,7 +38,7 @@ func (p *stateRmStepRunner) Run(ctx command.ProjectContext, extraArgs []string, if err == nil { if _, planPathErr := os.Stat(planPath); !os.IsNotExist(planPathErr) { ctx.Log.Info("state rm successful, deleting planfile") - if removeErr := os.Remove(planPath); removeErr != nil { + if removeErr := utils.RemoveIgnoreNonExistent(planPath); removeErr != nil { ctx.Log.Warn("failed to delete planfile after successful state rm: %s", removeErr) } } diff --git a/server/core/terraform/distribution.go b/server/core/terraform/distribution.go new file mode 100644 index 0000000000..0fd781765d --- /dev/null +++ b/server/core/terraform/distribution.go @@ -0,0 +1,133 @@ +package terraform + +import ( + "context" + "fmt" + "sort" + + "github.com/hashicorp/go-version" + "github.com/hashicorp/hc-install/product" + "github.com/hashicorp/hc-install/releases" + "github.com/opentofu/tofudl" +) + +type Distribution interface { + BinName() string + Downloader() Downloader + // ResolveConstraint gets the latest version for the given constraint + ResolveConstraint(context.Context, string) (*version.Version, error) +} + +type DistributionOpenTofu struct { + downloader Downloader +} + +func NewDistributionOpenTofu() Distribution { + return &DistributionOpenTofu{ + downloader: &TofuDownloader{}, + } +} + +func NewDistributionOpenTofuWithDownloader(downloader Downloader) Distribution { + return &DistributionOpenTofu{ + downloader: downloader, + } +} + +func (*DistributionOpenTofu) BinName() string { + return "tofu" +} + +func (d *DistributionOpenTofu) Downloader() Downloader { + return d.downloader +} + +func (*DistributionOpenTofu) ResolveConstraint(ctx context.Context, constraintStr string) 
(*version.Version, error) { + dl, err := tofudl.New() + if err != nil { + return nil, err + } + + vc, err := version.NewConstraint(constraintStr) + if err != nil { + return nil, fmt.Errorf("error parsing constraint string: %s", err) + } + + allVersions, err := dl.ListVersions(ctx) + if err != nil { + return nil, fmt.Errorf("error listing OpenTofu versions: %s", err) + } + + var versions []*version.Version + for _, ver := range allVersions { + v, err := version.NewVersion(string(ver.ID)) + if err != nil { + return nil, err + } + + if vc.Check(v) { + versions = append(versions, v) + } + } + sort.Sort(version.Collection(versions)) + + if len(versions) == 0 { + return nil, fmt.Errorf("no OpenTofu versions found for constraints %s", constraintStr) + } + + // We want to select the highest version that satisfies the constraint. + version := versions[len(versions)-1] + + // Return the selected version. + return version, nil +} + +type DistributionTerraform struct { + downloader Downloader +} + +func NewDistributionTerraform() Distribution { + return &DistributionTerraform{ + downloader: &TerraformDownloader{}, + } +} + +func NewDistributionTerraformWithDownloader(downloader Downloader) Distribution { + return &DistributionTerraform{ + downloader: downloader, + } +} + +func (*DistributionTerraform) BinName() string { + return "terraform" +} + +func (d *DistributionTerraform) Downloader() Downloader { + return d.downloader +} + +func (*DistributionTerraform) ResolveConstraint(ctx context.Context, constraintStr string) (*version.Version, error) { + vc, err := version.NewConstraint(constraintStr) + if err != nil { + return nil, fmt.Errorf("error parsing constraint string: %s", err) + } + + constrainedVersions := &releases.Versions{ + Product: product.Terraform, + Constraints: vc, + } + + installCandidates, err := constrainedVersions.List(ctx) + if err != nil { + return nil, fmt.Errorf("error listing available versions: %s", err) + } + if len(installCandidates) == 0 { + return nil, fmt.Errorf("no Terraform versions found for constraints %s", constraintStr) + } + + // We want to select the highest version that satisfies the constraint. + versionDownloader := installCandidates[len(installCandidates)-1] + + // Get the Version object from the versionDownloader. + return versionDownloader.(*releases.ExactVersion).Version, nil +} diff --git a/server/core/terraform/distribution_test.go b/server/core/terraform/distribution_test.go new file mode 100644 index 0000000000..dbd9433834 --- /dev/null +++ b/server/core/terraform/distribution_test.go @@ -0,0 +1,33 @@ +package terraform_test + +import ( + "context" + "testing" + + "github.com/runatlantis/atlantis/server/core/terraform" + .
"github.com/runatlantis/atlantis/testing" +) + +func TestOpenTofuBinName(t *testing.T) { + d := terraform.NewDistributionOpenTofu() + Equals(t, d.BinName(), "tofu") +} + +func TestResolveOpenTofuVersions(t *testing.T) { + d := terraform.NewDistributionOpenTofu() + version, err := d.ResolveConstraint(context.Background(), "= 1.8.0") + Ok(t, err) + Equals(t, version.String(), "1.8.0") +} + +func TestTerraformBinName(t *testing.T) { + d := terraform.NewDistributionTerraform() + Equals(t, d.BinName(), "terraform") +} + +func TestResolveTerraformVersions(t *testing.T) { + d := terraform.NewDistributionTerraform() + version, err := d.ResolveConstraint(context.Background(), "= 1.9.3") + Ok(t, err) + Equals(t, version.String(), "1.9.3") +} diff --git a/server/core/terraform/downloader.go b/server/core/terraform/downloader.go new file mode 100644 index 0000000000..36cc4e1071 --- /dev/null +++ b/server/core/terraform/downloader.go @@ -0,0 +1,70 @@ +package terraform + +import ( + "context" + "os" + "path/filepath" + + "github.com/hashicorp/go-version" + install "github.com/hashicorp/hc-install" + "github.com/hashicorp/hc-install/product" + "github.com/hashicorp/hc-install/releases" + "github.com/hashicorp/hc-install/src" + "github.com/opentofu/tofudl" +) + +//go:generate pegomock generate --package mocks -o mocks/mock_downloader.go Downloader + +// Downloader is for downloading terraform versions. +type Downloader interface { + Install(ctx context.Context, dir string, downloadURL string, v *version.Version) (string, error) +} + +type TofuDownloader struct{} + +func (d *TofuDownloader) Install(ctx context.Context, dir string, _downloadURL string, v *version.Version) (string, error) { + // Initialize the downloader: + dl, err := tofudl.New() + if err != nil { + return "", err + } + + binary, err := dl.Download(ctx, tofudl.DownloadOptVersion(tofudl.Version(v.String()))) + if err != nil { + return "", err + } + + // Write out the tofu binary to the disk: + file := filepath.Join(dir, "tofu"+v.String()) + if err := os.WriteFile(file, binary, 0755); /* #nosec G306 */ err != nil { + return "", err + } + + return file, nil +} + +type TerraformDownloader struct{} + +func (d *TerraformDownloader) Install(ctx context.Context, dir string, downloadURL string, v *version.Version) (string, error) { + installer := install.NewInstaller() + execPath, err := installer.Install(ctx, []src.Installable{ + &releases.ExactVersion{ + Product: product.Terraform, + Version: v, + InstallDir: dir, + ApiBaseURL: downloadURL, + }, + }) + if err != nil { + return "", err + } + + // hc-install installs terraform binary as just "terraform". + // We need to rename it to terraform{version} to be consistent with current naming convention. + newPath := filepath.Join(dir, "terraform"+v.String()) + if err := os.Rename(execPath, newPath); err != nil { + return "", err + } + + return newPath, nil +} diff --git a/server/core/terraform/downloader_test.go b/server/core/terraform/downloader_test.go new file mode 100644 index 0000000000..b6080c4a90 --- /dev/null +++ b/server/core/terraform/downloader_test.go @@ -0,0 +1,46 @@ +package terraform_test + +import ( + "context" + "os" + "testing" + + "github.com/hashicorp/go-version" + . 
"github.com/petergtz/pegomock/v4" + "github.com/runatlantis/atlantis/cmd" + "github.com/runatlantis/atlantis/server/core/terraform" +) + +func TestTerraformInstall(t *testing.T) { + d := &terraform.TerraformDownloader{} + RegisterMockTestingT(t) + binDir := t.TempDir() + + v, _ := version.NewVersion("1.8.1") + + newPath, err := d.Install(context.Background(), binDir, cmd.DefaultTFDownloadURL, v) + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } + + if _, err := os.Stat(newPath); os.IsNotExist(err) { + t.Errorf("Binary not found at %s", newPath) + } +} + +func TestOpenTofuInstall(t *testing.T) { + d := &terraform.TofuDownloader{} + RegisterMockTestingT(t) + binDir := t.TempDir() + + v, _ := version.NewVersion("1.8.0") + + newPath, err := d.Install(context.Background(), binDir, cmd.DefaultTFDownloadURL, v) + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } + + if _, err := os.Stat(newPath); os.IsNotExist(err) { + t.Errorf("Binary not found at %s", newPath) + } +} diff --git a/server/core/terraform/mocks/mock_downloader.go b/server/core/terraform/mocks/mock_downloader.go index ed16db0eff..8f2e57c24d 100644 --- a/server/core/terraform/mocks/mock_downloader.go +++ b/server/core/terraform/mocks/mock_downloader.go @@ -4,6 +4,8 @@ package mocks import ( + context "context" + go_version "github.com/hashicorp/go-version" pegomock "github.com/petergtz/pegomock/v4" "reflect" "time" @@ -24,34 +26,23 @@ func NewMockDownloader(options ...pegomock.Option) *MockDownloader { func (mock *MockDownloader) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockDownloader) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockDownloader) GetAny(dst string, src string) error { +func (mock *MockDownloader) Install(ctx context.Context, dir string, downloadURL string, v *go_version.Version) (string, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockDownloader().") } - params := []pegomock.Param{dst, src} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetAny", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) + _params := []pegomock.Param{ctx, dir, downloadURL, v} + _result := pegomock.GetGenericMockFrom(mock).Invoke("Install", _params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var _ret0 string + var _ret1 error + if len(_result) != 0 { + if _result[0] != nil { + _ret0 = _result[0].(string) } - } - return ret0 -} - -func (mock *MockDownloader) GetFile(dst string, src string) error { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockDownloader().") - } - params := []pegomock.Param{dst, src} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetFile", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) + if _result[1] != nil { + _ret1 = _result[1].(error) } } - return ret0 + return _ret0, _ret1 } func (mock *MockDownloader) VerifyWasCalledOnce() *VerifierMockDownloader { @@ -91,63 +82,48 @@ type VerifierMockDownloader struct { timeout time.Duration } -func (verifier *VerifierMockDownloader) GetAny(dst string, src string) *MockDownloader_GetAny_OngoingVerification { - params := []pegomock.Param{dst, src} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetAny", params, verifier.timeout) - return &MockDownloader_GetAny_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +func (verifier *VerifierMockDownloader) Install(ctx context.Context, dir string, downloadURL string, v *go_version.Version) *MockDownloader_Install_OngoingVerification { + _params := []pegomock.Param{ctx, dir, downloadURL, v} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Install", _params, verifier.timeout) + return &MockDownloader_Install_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } -type MockDownloader_GetAny_OngoingVerification struct { +type MockDownloader_Install_OngoingVerification struct { mock *MockDownloader methodInvocations []pegomock.MethodInvocation } -func (c *MockDownloader_GetAny_OngoingVerification) GetCapturedArguments() (string, string) { - dst, src := c.GetAllCapturedArguments() - return dst[len(dst)-1], src[len(src)-1] +func (c *MockDownloader_Install_OngoingVerification) GetCapturedArguments() (context.Context, string, string, *go_version.Version) { + ctx, dir, downloadURL, v := c.GetAllCapturedArguments() + return ctx[len(ctx)-1], dir[len(dir)-1], downloadURL[len(downloadURL)-1], v[len(v)-1] } -func (c *MockDownloader_GetAny_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) +func (c *MockDownloader_Install_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []string, _param2 []string, _param3 []*go_version.Version) { + _params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(_params) > 0 { + if len(_params) > 0 { + _param0 = make([]context.Context, len(c.methodInvocations)) + for u, param := range _params[0] { + _param0[u] = param.(context.Context) + } } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) + if len(_params) > 1 { + _param1 = make([]string, len(c.methodInvocations)) + for u, param := range _params[1] { + _param1[u] = param.(string) + } } - } - return -} - -func (verifier *VerifierMockDownloader) GetFile(dst string, src string) *MockDownloader_GetFile_OngoingVerification { - params := []pegomock.Param{dst, src} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetFile", params, 
verifier.timeout) - return &MockDownloader_GetFile_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockDownloader_GetFile_OngoingVerification struct { - mock *MockDownloader - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockDownloader_GetFile_OngoingVerification) GetCapturedArguments() (string, string) { - dst, src := c.GetAllCapturedArguments() - return dst[len(dst)-1], src[len(src)-1] -} - -func (c *MockDownloader_GetFile_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) + if len(_params) > 2 { + _param2 = make([]string, len(c.methodInvocations)) + for u, param := range _params[2] { + _param2[u] = param.(string) + } } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) + if len(_params) > 3 { + _param3 = make([]*go_version.Version, len(c.methodInvocations)) + for u, param := range _params[3] { + _param3[u] = param.(*go_version.Version) + } } } return diff --git a/server/core/terraform/mocks/mock_terraform_client.go b/server/core/terraform/mocks/mock_terraform_client.go index f5fe812a36..279de1a751 100644 --- a/server/core/terraform/mocks/mock_terraform_client.go +++ b/server/core/terraform/mocks/mock_terraform_client.go @@ -31,68 +31,49 @@ func (mock *MockClient) DetectVersion(log logging.SimpleLogging, projectDirector if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{log, projectDirectory} - result := pegomock.GetGenericMockFrom(mock).Invoke("DetectVersion", params, []reflect.Type{reflect.TypeOf((**go_version.Version)(nil)).Elem()}) - var ret0 *go_version.Version - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*go_version.Version) + _params := []pegomock.Param{log, projectDirectory} + _result := pegomock.GetGenericMockFrom(mock).Invoke("DetectVersion", _params, []reflect.Type{reflect.TypeOf((**go_version.Version)(nil)).Elem()}) + var _ret0 *go_version.Version + if len(_result) != 0 { + if _result[0] != nil { + _ret0 = _result[0].(*go_version.Version) } } - return ret0 + return _ret0 } func (mock *MockClient) EnsureVersion(log logging.SimpleLogging, v *go_version.Version) error { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{log, v} - result := pegomock.GetGenericMockFrom(mock).Invoke("EnsureVersion", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) + _params := []pegomock.Param{log, v} + _result := pegomock.GetGenericMockFrom(mock).Invoke("EnsureVersion", _params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) + var _ret0 error + if len(_result) != 0 { + if _result[0] != nil { + _ret0 = _result[0].(error) } } - return ret0 -} - -func (mock *MockClient) ListAvailableVersions(log logging.SimpleLogging) ([]string, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockClient().") - } - params := []pegomock.Param{log} - result := pegomock.GetGenericMockFrom(mock).Invoke("ListAvailableVersions", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 + return _ret0 } func (mock *MockClient) RunCommandWithVersion(ctx command.ProjectContext, path string, args []string, envs map[string]string, v *go_version.Version, workspace string) (string, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{ctx, path, args, envs, v, workspace} - result := pegomock.GetGenericMockFrom(mock).Invoke("RunCommandWithVersion", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) + _params := []pegomock.Param{ctx, path, args, envs, v, workspace} + _result := pegomock.GetGenericMockFrom(mock).Invoke("RunCommandWithVersion", _params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var _ret0 string + var _ret1 error + if len(_result) != 0 { + if _result[0] != nil { + _ret0 = _result[0].(string) } - if result[1] != nil { - ret1 = result[1].(error) + if _result[1] != nil { + _ret1 = _result[1].(error) } } - return ret0, ret1 + return _ret0, _ret1 } func (mock *MockClient) VerifyWasCalledOnce() *VerifierMockClient { @@ -133,8 +114,8 @@ type VerifierMockClient struct { } func (verifier *VerifierMockClient) DetectVersion(log logging.SimpleLogging, projectDirectory string) *MockClient_DetectVersion_OngoingVerification { - params := []pegomock.Param{log, projectDirectory} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DetectVersion", params, verifier.timeout) + _params := []pegomock.Param{log, projectDirectory} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DetectVersion", _params, verifier.timeout) return &MockClient_DetectVersion_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -149,23 +130,27 @@ func (c *MockClient_DetectVersion_OngoingVerification) GetCapturedArguments() (l } func (c *MockClient_DetectVersion_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(logging.SimpleLogging) + _params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(_params) > 0 { + if len(_params) > 0 { + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) + for u, param := range _params[0] { + _param0[u] = param.(logging.SimpleLogging) + } } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) + if len(_params) > 1 { + _param1 = make([]string, len(c.methodInvocations)) + for u, param := range _params[1] { + _param1[u] = param.(string) + } } } return } func (verifier 
*VerifierMockClient) EnsureVersion(log logging.SimpleLogging, v *go_version.Version) *MockClient_EnsureVersion_OngoingVerification { - params := []pegomock.Param{log, v} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "EnsureVersion", params, verifier.timeout) + _params := []pegomock.Param{log, v} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "EnsureVersion", _params, verifier.timeout) return &MockClient_EnsureVersion_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -180,50 +165,27 @@ func (c *MockClient_EnsureVersion_OngoingVerification) GetCapturedArguments() (l } func (c *MockClient_EnsureVersion_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []*go_version.Version) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(logging.SimpleLogging) - } - _param1 = make([]*go_version.Version, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(*go_version.Version) + _params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(_params) > 0 { + if len(_params) > 0 { + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) + for u, param := range _params[0] { + _param0[u] = param.(logging.SimpleLogging) + } } - } - return -} - -func (verifier *VerifierMockClient) ListAvailableVersions(log logging.SimpleLogging) *MockClient_ListAvailableVersions_OngoingVerification { - params := []pegomock.Param{log} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ListAvailableVersions", params, verifier.timeout) - return &MockClient_ListAvailableVersions_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockClient_ListAvailableVersions_OngoingVerification struct { - mock *MockClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockClient_ListAvailableVersions_OngoingVerification) GetCapturedArguments() logging.SimpleLogging { - log := c.GetAllCapturedArguments() - return log[len(log)-1] -} - -func (c *MockClient_ListAvailableVersions_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(logging.SimpleLogging) + if len(_params) > 1 { + _param1 = make([]*go_version.Version, len(c.methodInvocations)) + for u, param := range _params[1] { + _param1[u] = param.(*go_version.Version) + } } } return } func (verifier *VerifierMockClient) RunCommandWithVersion(ctx command.ProjectContext, path string, args []string, envs map[string]string, v *go_version.Version, workspace string) *MockClient_RunCommandWithVersion_OngoingVerification { - params := []pegomock.Param{ctx, path, args, envs, v, workspace} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "RunCommandWithVersion", params, verifier.timeout) + _params := []pegomock.Param{ctx, path, args, 
envs, v, workspace} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "RunCommandWithVersion", _params, verifier.timeout) return &MockClient_RunCommandWithVersion_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -238,31 +200,43 @@ func (c *MockClient_RunCommandWithVersion_OngoingVerification) GetCapturedArgume } func (c *MockClient_RunCommandWithVersion_OngoingVerification) GetAllCapturedArguments() (_param0 []command.ProjectContext, _param1 []string, _param2 [][]string, _param3 []map[string]string, _param4 []*go_version.Version, _param5 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(command.ProjectContext) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([][]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.([]string) - } - _param3 = make([]map[string]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(map[string]string) - } - _param4 = make([]*go_version.Version, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(*go_version.Version) - } - _param5 = make([]string, len(c.methodInvocations)) - for u, param := range params[5] { - _param5[u] = param.(string) + _params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(_params) > 0 { + if len(_params) > 0 { + _param0 = make([]command.ProjectContext, len(c.methodInvocations)) + for u, param := range _params[0] { + _param0[u] = param.(command.ProjectContext) + } + } + if len(_params) > 1 { + _param1 = make([]string, len(c.methodInvocations)) + for u, param := range _params[1] { + _param1[u] = param.(string) + } + } + if len(_params) > 2 { + _param2 = make([][]string, len(c.methodInvocations)) + for u, param := range _params[2] { + _param2[u] = param.([]string) + } + } + if len(_params) > 3 { + _param3 = make([]map[string]string, len(c.methodInvocations)) + for u, param := range _params[3] { + _param3[u] = param.(map[string]string) + } + } + if len(_params) > 4 { + _param4 = make([]*go_version.Version, len(c.methodInvocations)) + for u, param := range _params[4] { + _param4[u] = param.(*go_version.Version) + } + } + if len(_params) > 5 { + _param5 = make([]string, len(c.methodInvocations)) + for u, param := range _params[5] { + _param5[u] = param.(string) + } } } return diff --git a/server/core/terraform/terraform_client.go b/server/core/terraform/terraform_client.go index 7ca6fc7277..cd2a0d8ad7 100644 --- a/server/core/terraform/terraform_client.go +++ b/server/core/terraform/terraform_client.go @@ -19,23 +19,18 @@ package terraform import ( "context" "fmt" - "net/http" "os" "os/exec" "path/filepath" "regexp" - "runtime" - "sort" "strings" "sync" "time" - "github.com/hashicorp/go-getter/v2" "github.com/hashicorp/go-version" "github.com/hashicorp/terraform-config-inspect/tfconfig" "github.com/mitchellh/go-homedir" "github.com/pkg/errors" - "github.com/warrensbox/terraform-switcher/lib" "github.com/runatlantis/atlantis/server/core/runtime/models" "github.com/runatlantis/atlantis/server/events/command" @@ -57,14 +52,14 @@ type Client interface { // EnsureVersion makes sure that terraform 
version `v` is available to use EnsureVersion(log logging.SimpleLogging, v *version.Version) error - // ListAvailableVersions returns all available version of Terraform, if available; otherwise this will return an empty list. - ListAvailableVersions(log logging.SimpleLogging) ([]string, error) - // DetectVersion Extracts required_version from Terraform configuration in the specified project directory. Returns nil if unable to determine the version. DetectVersion(log logging.SimpleLogging, projectDirectory string) *version.Version } type DefaultClient struct { + // Distribution handles logic specific to the TF distribution being used by Atlantis + distribution Distribution + // defaultVersion is the default version of terraform to use if another // version isn't specified. defaultVersion *version.Version @@ -75,8 +70,7 @@ type DefaultClient struct { // overrideTF can be used to override the terraform binary during testing // with another binary, ex. echo. overrideTF string - // downloader downloads terraform versions. - downloader Downloader + // settings for the downloader. downloadBaseURL string downloadAllowed bool // versions maps from the string representation of a tf version (ex. 0.11.10) @@ -93,14 +87,6 @@ type DefaultClient struct { projectCmdOutputHandler jobs.ProjectCommandOutputHandler } -//go:generate pegomock generate --package mocks -o mocks/mock_downloader.go Downloader - -// Downloader is for downloading terraform versions. -type Downloader interface { - GetFile(dst, src string) error - GetAny(dst, src string) error -} - // versionRegex extracts the version from `terraform version` output. // // Terraform v0.12.0-alpha4 (2c36829d3265661d8edbd5014de8090ea7e2a076) @@ -108,11 +94,15 @@ type Downloader interface { // // Terraform v0.11.10 // => 0.11.10 -var versionRegex = regexp.MustCompile("Terraform v(.*?)(\\s.*)?\n") +// +// OpenTofu v1.0.0 +// => 1.0.0 +var versionRegex = regexp.MustCompile("(?:Terraform|OpenTofu) v(.*?)(\\s.*)?\n") // NewClientWithDefaultVersion creates a new terraform client and pre-fetches the default version func NewClientWithDefaultVersion( log logging.SimpleLogging, + distribution Distribution, binDir string, cacheDir string, tfeToken string, @@ -120,7 +110,6 @@ func NewClientWithDefaultVersion( defaultVersionStr string, defaultVersionFlagName string, tfDownloadURL string, - tfDownloader Downloader, tfDownloadAllowed bool, usePluginCache bool, fetchAsync bool, @@ -131,17 +120,18 @@ func NewClientWithDefaultVersion( versions := make(map[string]string) var versionsLock sync.Mutex - localPath, err := exec.LookPath("terraform") + localPath, err := exec.LookPath(distribution.BinName()) if err != nil && defaultVersionStr == "" { - return nil, fmt.Errorf("terraform not found in $PATH. Set --%s or download terraform from https://developer.hashicorp.com/terraform/downloads", defaultVersionFlagName) + return nil, fmt.Errorf("%s not found in $PATH. Set --%s or download terraform from https://developer.hashicorp.com/terraform/downloads", distribution.BinName(), defaultVersionFlagName) } if err == nil { - localVersion, err = getVersion(localPath) + localVersion, err = getVersion(localPath, distribution.BinName()) if err != nil { return nil, err } versions[localVersion.String()] = localPath if defaultVersionStr == "" { + // If they haven't set a default version, then whatever they had // locally is now the default. 
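// Illustration with hypothetical values: if no default version flag is
// set and the local binary's `version` output is "OpenTofu v1.8.0", then
// 1.8.0 becomes the default (parsed by the versionRegex above).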
finalDefaultVersion = localVersion @@ -158,10 +148,10 @@ func NewClientWithDefaultVersion( // Since ensureVersion might end up downloading terraform, // we call it asynchronously so as to not delay server startup. versionsLock.Lock() - _, err := ensureVersion(log, tfDownloader, versions, defaultVersion, binDir, tfDownloadURL, tfDownloadAllowed) + _, err := ensureVersion(log, distribution, versions, defaultVersion, binDir, tfDownloadURL, tfDownloadAllowed) versionsLock.Unlock() if err != nil { - log.Err("could not download terraform %s: %s", defaultVersion.String(), err) + log.Err("could not download %s %s: %s", distribution.BinName(), defaultVersion.String(), err) } } @@ -183,10 +173,10 @@ func NewClientWithDefaultVersion( } } return &DefaultClient{ + distribution: distribution, defaultVersion: finalDefaultVersion, terraformPluginCacheDir: cacheDir, binDir: binDir, - downloader: tfDownloader, downloadBaseURL: tfDownloadURL, downloadAllowed: tfDownloadAllowed, versionsLock: &versionsLock, @@ -199,6 +189,7 @@ func NewClientWithDefaultVersion( func NewTestClient( log logging.SimpleLogging, + distribution Distribution, binDir string, cacheDir string, tfeToken string, @@ -206,13 +197,13 @@ func NewTestClient( defaultVersionStr string, defaultVersionFlagName string, tfDownloadURL string, - tfDownloader Downloader, tfDownloadAllowed bool, usePluginCache bool, projectCmdOutputHandler jobs.ProjectCommandOutputHandler, ) (*DefaultClient, error) { return NewClientWithDefaultVersion( log, + distribution, binDir, cacheDir, tfeToken, @@ -220,7 +211,6 @@ func NewTestClient( defaultVersionStr, defaultVersionFlagName, tfDownloadURL, - tfDownloader, tfDownloadAllowed, usePluginCache, false, @@ -234,10 +224,10 @@ func NewTestClient( // a specific version is set. // defaultVersionFlagName is the name of the flag that sets the default terraform // version. -// tfDownloader is used to download terraform versions. // Will asynchronously download the required version if it doesn't exist already. func NewClient( log logging.SimpleLogging, + distribution Distribution, binDir string, cacheDir string, tfeToken string, @@ -245,13 +235,13 @@ func NewClient( defaultVersionStr string, defaultVersionFlagName string, tfDownloadURL string, - tfDownloader Downloader, tfDownloadAllowed bool, usePluginCache bool, projectCmdOutputHandler jobs.ProjectCommandOutputHandler, ) (*DefaultClient, error) { return NewClientWithDefaultVersion( log, + distribution, binDir, cacheDir, tfeToken, @@ -259,7 +249,6 @@ func NewClient( defaultVersionStr, defaultVersionFlagName, tfDownloadURL, - tfDownloader, tfDownloadAllowed, usePluginCache, true, @@ -278,94 +267,62 @@ func (c *DefaultClient) TerraformBinDir() string { return c.binDir } -// ListAvailableVersions returns all available version of Terraform. If downloads are not allowed, this will return an empty list. -func (c *DefaultClient) ListAvailableVersions(log logging.SimpleLogging) ([]string, error) { - url := fmt.Sprintf("%s/terraform", c.downloadBaseURL) - - if !c.downloadAllowed { - log.Debug("Terraform downloads disabled. Won't list Terraform versions available at %s", url) - return []string{}, nil - } - - log.Debug("Listing Terraform versions available at: %s", url) - - // terraform-switcher calls os.Exit(1) if it fails to successfully GET the configured URL. - // So, before calling it, test if we can connect. Then we can return an error instead if the request fails. - resp, err := http.Get(url) // #nosec G107 -- terraform-switch makes this same call below. 
Also, we don't process the response payload. - if err != nil { - return nil, fmt.Errorf("Unable to list Terraform versions: %s", err) - } - defer resp.Body.Close() // nolint: errcheck - - if resp.StatusCode != http.StatusOK { - return nil, fmt.Errorf("Unable to list Terraform versions: response code %d from %s", resp.StatusCode, url) +// ExtractExactRegex attempts to extract an exact version number from the provided string as a fallback. +// The function expects the version string to be in one of the following formats: "= x.y.z", "=x.y.z", or "x.y.z" where x, y, and z are integers. +// If the version string matches one of these formats, the function returns a slice containing the exact version number. +// If the version string does not match any of these formats, the function logs a debug message and returns nil. +func (c *DefaultClient) ExtractExactRegex(log logging.SimpleLogging, version string) []string { + re := regexp.MustCompile(`^=?\s*([0-9.]+)\s*$`) + matched := re.FindStringSubmatch(version) + if len(matched) == 0 { + log.Debug("exact version regex not found in the version %q", version) + return nil } - - versions, err := lib.GetTFList(url, true) - return versions, err + // The first element of the slice is the entire string, so we want the second element (the first capture group) + tfVersions := []string{matched[1]} + log.Debug("extracted exact version %q from version %q", tfVersions[0], version) + return tfVersions } -// DetectVersion Extracts required_version from Terraform configuration in the specified project directory. Returns nil if unable to determine the version. -// This will also try to intelligently evaluate non-exact matches by listing the available versions of Terraform and picking the best match. +// DetectVersion extracts required_version from Terraform configuration in the specified project directory. Returns nil if unable to determine the version. +// It will also try to evaluate non-exact matches by passing the Constraints to the hc-install Releases API, which will return a list of available versions. +// It will then select the highest version that satisfies the constraint. func (c *DefaultClient) DetectVersion(log logging.SimpleLogging, projectDirectory string) *version.Version { module, diags := tfconfig.LoadModule(projectDirectory) if diags.HasErrors() { - log.Err("Trying to detect required version: %s", diags.Error()) + log.Err("trying to detect required version: %s", diags.Error()) } if len(module.RequiredCore) != 1 { - log.Info("Cannot determine which version to use from terraform configuration, detected %d possibilities.", len(module.RequiredCore)) + log.Info("cannot determine which version to use from terraform configuration, detected %d possibilities.", len(module.RequiredCore)) return nil } requiredVersionSetting := module.RequiredCore[0] log.Debug("Found required_version setting of %q", requiredVersionSetting) - tfVersions, err := c.ListAvailableVersions(log) - if err != nil { - log.Err("Unable to list Terraform versions, may fall back to default: %s", err) - } - - if len(tfVersions) == 0 { - // Fall back to an exact required version string - // We allow `= x.y.z`, `=x.y.z` or `x.y.z` where `x`, `y` and `z` are integers. 
- re := regexp.MustCompile(`^=?\s*([0-9.]+)\s*$`) - matched := re.FindStringSubmatch(requiredVersionSetting) + if !c.downloadAllowed { + log.Debug("terraform downloads disabled.") + matched := c.ExtractExactRegex(log, requiredVersionSetting) if len(matched) == 0 { - log.Debug("Did not specify exact version in terraform configuration, found %q", requiredVersionSetting) + log.Debug("did not specify exact version in terraform configuration, found %q", requiredVersionSetting) return nil } - tfVersions = []string{matched[1]} - } - - constraint, _ := version.NewConstraint(requiredVersionSetting) - versions := make([]*version.Version, len(tfVersions)) - for i, tfvals := range tfVersions { - newVersion, err := version.NewVersion(tfvals) - if err == nil { - versions[i] = newVersion + version, err := version.NewVersion(matched[0]) + if err != nil { + log.Err("error parsing version string: %s", err) + return nil } + return version } - if len(versions) == 0 { - log.Debug("Did not specify exact valid version in terraform configuration, found %q", requiredVersionSetting) + downloadVersion, err := c.distribution.ResolveConstraint(context.Background(), requiredVersionSetting) + if err != nil { + log.Err("%s", err) return nil } - sort.Sort(sort.Reverse(version.Collection(versions))) - - for _, element := range versions { - if constraint.Check(element) { // Validate a version against a constraint - tfversionStr := element.String() - if lib.ValidVersionFormat(tfversionStr) { //check if version format is correct - tfversion, _ := version.NewVersion(tfversionStr) - log.Info("Detected module requires version: %s", tfversionStr) - return tfversion - } - } - } - log.Debug("Could not match any valid terraform version with %q", requiredVersionSetting) - return nil + return downloadVersion } // See Client.EnsureVersion. @@ -376,7 +333,7 @@ func (c *DefaultClient) EnsureVersion(log logging.SimpleLogging, v *version.Vers var err error c.versionsLock.Lock() - _, err = ensureVersion(log, c.downloader, c.versions, v, c.binDir, c.downloadBaseURL, c.downloadAllowed) + _, err = ensureVersion(log, c.distribution, c.versions, v, c.binDir, c.downloadBaseURL, c.downloadAllowed) c.versionsLock.Unlock() if err != nil { return err @@ -456,7 +413,7 @@ func (c *DefaultClient) prepCmd(log logging.SimpleLogging, v *version.Version, w } else { var err error c.versionsLock.Lock() - binPath, err = ensureVersion(log, c.downloader, c.versions, v, c.binDir, c.downloadBaseURL, c.downloadAllowed) + binPath, err = ensureVersion(log, c.distribution, c.versions, v, c.binDir, c.downloadBaseURL, c.downloadAllowed) c.versionsLock.Unlock() if err != nil { return "", nil, err @@ -509,7 +466,7 @@ func (c *DefaultClient) RunCommandAsync(ctx command.ProjectContext, path string, envVars = append(envVars, fmt.Sprintf("%s=%s", key, val)) } - runner := models.NewShellCommandRunner(cmd, envVars, path, true, c.projectCmdOutputHandler) + runner := models.NewShellCommandRunner(nil, cmd, envVars, path, true, c.projectCmdOutputHandler) inCh, outCh := runner.RunCommandAsync(ctx) return inCh, outCh } @@ -527,7 +484,15 @@ func MustConstraint(v string) version.Constraints { // ensureVersion returns the path to a terraform binary of version v. // It will download this version if we don't have it. 
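// In short, the lookup order below is: the in-memory versions map first,
// then $PATH and binDir for an existing "<binName><version>" binary, and
// only then, when downloads are allowed, the distribution's
// Downloader.Install.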
-func ensureVersion(log logging.SimpleLogging, dl Downloader, versions map[string]string, v *version.Version, binDir string, downloadURL string, downloadsAllowed bool) (string, error) { +func ensureVersion( + log logging.SimpleLogging, + dist Distribution, + versions map[string]string, + v *version.Version, + binDir string, + downloadURL string, + downloadsAllowed bool, +) (string, error) { if binPath, ok := versions[v.String()]; ok { return binPath, nil } @@ -535,7 +500,7 @@ func ensureVersion(log logging.SimpleLogging, dl Downloader, versions map[string // This tf version might not yet be in the versions map even though it // exists on disk. This would happen if users have manually added // terraform{version} binaries. In this case we don't want to re-download. - binFile := "terraform" + v.String() + binFile := dist.BinName() + v.String() if binPath, err := exec.LookPath(binFile); err == nil { versions[v.String()] = binPath return binPath, nil @@ -549,21 +514,27 @@ func ensureVersion(log logging.SimpleLogging, dl Downloader, versions map[string return dest, nil } if !downloadsAllowed { - return "", fmt.Errorf("Could not find terraform version %s in PATH or %s, and downloads are disabled", v.String(), binDir) + return "", fmt.Errorf( + "could not find %s version %s in PATH or %s, and downloads are disabled", + dist.BinName(), + v.String(), + binDir, + ) } - log.Info("Could not find terraform version %s in PATH or %s, downloading from %s", v.String(), binDir, downloadURL) - urlPrefix := fmt.Sprintf("%s/terraform/%s/terraform_%s", downloadURL, v.String(), v.String()) - binURL := fmt.Sprintf("%s_%s_%s.zip", urlPrefix, runtime.GOOS, runtime.GOARCH) - checksumURL := fmt.Sprintf("%s_SHA256SUMS", urlPrefix) - fullSrcURL := fmt.Sprintf("%s?checksum=file:%s", binURL, checksumURL) - if err := dl.GetFile(dest, fullSrcURL); err != nil { - return "", errors.Wrapf(err, "downloading terraform version %s at %q", v.String(), fullSrcURL) + log.Info("could not find %s version %s in PATH or %s", dist.BinName(), v.String(), binDir) + + log.Info("downloading %s version %s from download URL %s", dist.BinName(), v.String(), downloadURL) + + execPath, err := dist.Downloader().Install(context.Background(), binDir, downloadURL, v) + + if err != nil { + return "", errors.Wrapf(err, "error downloading %s version %s", dist.BinName(), v.String()) } - log.Info("Downloaded terraform %s to %s", v.String(), dest) - versions[v.String()] = dest - return dest, nil + log.Info("Downloaded %s %s to %s", dist.BinName(), v.String(), execPath) + versions[v.String()] = execPath + return execPath, nil } // generateRCFile generates a .terraformrc file containing config for tfeToken @@ -605,15 +576,15 @@ func isAsyncEligibleCommand(cmd string) bool { return false } -func getVersion(tfBinary string) (*version.Version, error) { +func getVersion(tfBinary string, binName string) (*version.Version, error) { versionOutBytes, err := exec.Command(tfBinary, "version").Output() // #nosec versionOutput := string(versionOutBytes) if err != nil { - return nil, errors.Wrapf(err, "running terraform version: %s", versionOutput) + return nil, errors.Wrapf(err, "running %s version: %s", binName, versionOutput) } match := versionRegex.FindStringSubmatch(versionOutput) if len(match) <= 1 { - return nil, fmt.Errorf("could not parse terraform version from %s", versionOutput) + return nil, fmt.Errorf("could not parse %s version from %s", binName, versionOutput) } return version.NewVersion(match[1]) } @@ -624,17 +595,3 @@ func getVersion(tfBinary string) 
(*version.Version, error) { var rcFileContents = `credentials "%s" { token = %q }` - -type DefaultDownloader struct{} - -// See go-getter.GetFile. -func (d *DefaultDownloader) GetFile(dst, src string) error { - _, err := getter.GetFile(context.Background(), dst, src) - return err -} - -// See go-getter.GetFile. -func (d *DefaultDownloader) GetAny(dst, src string) error { - _, err := getter.GetAny(context.Background(), dst, src) - return err -} diff --git a/server/core/terraform/terraform_client_internal_test.go b/server/core/terraform/terraform_client_internal_test.go index 6dd4c89e85..8c6be3ee43 100644 --- a/server/core/terraform/terraform_client_internal_test.go +++ b/server/core/terraform/terraform_client_internal_test.go @@ -344,7 +344,7 @@ func TestDefaultClient_RunCommandAsync_ExitOne(t *testing.T) { _, outCh := client.RunCommandAsync(ctx, tmp, []string{"dying", "&&", "exit", "1"}, map[string]string{}, nil, "workspace") out, err := waitCh(outCh) - ErrEquals(t, fmt.Sprintf(`running "echo dying && exit 1" in %q: exit status 1`, tmp), err) + ErrEquals(t, fmt.Sprintf(`running 'sh -c "echo dying && exit 1"' in '%s': exit status 1`, tmp), err) // Test that we still get our output. Equals(t, "dying", out) diff --git a/server/core/terraform/terraform_client_test.go b/server/core/terraform/terraform_client_test.go index 29fccb4579..c60a5fb085 100644 --- a/server/core/terraform/terraform_client_test.go +++ b/server/core/terraform/terraform_client_test.go @@ -14,17 +14,17 @@ package terraform_test import ( + "context" "fmt" "os" "path/filepath" - "runtime" + "reflect" "strings" "testing" "time" version "github.com/hashicorp/go-version" . "github.com/petergtz/pegomock/v4" - pegomock "github.com/petergtz/pegomock/v4" "github.com/runatlantis/atlantis/cmd" "github.com/runatlantis/atlantis/server/core/terraform" "github.com/runatlantis/atlantis/server/core/terraform/mocks" @@ -77,7 +77,10 @@ is 0.11.13. You can update by downloading from developer.hashicorp.com/terraform Ok(t, err) defer tempSetEnv(t, "PATH", fmt.Sprintf("%s:%s", tmp, os.Getenv("PATH")))() - c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true, true, projectCmdOutputHandler) + mockDownloader := mocks.NewMockDownloader() + distribution := terraform.NewDistributionTerraformWithDownloader(mockDownloader) + + c, err := terraform.NewClient(logger, distribution, binDir, cacheDir, "", "", "", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, true, true, projectCmdOutputHandler) Ok(t, err) Ok(t, err) @@ -111,7 +114,10 @@ is 0.11.13. You can update by downloading from developer.hashicorp.com/terraform Ok(t, err) defer tempSetEnv(t, "PATH", fmt.Sprintf("%s:%s", tmp, os.Getenv("PATH")))() - c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true, true, projectCmdOutputHandler) + mockDownloader := mocks.NewMockDownloader() + distribution := terraform.NewDistributionTerraformWithDownloader(mockDownloader) + + c, err := terraform.NewClient(logger, distribution, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, true, true, projectCmdOutputHandler) Ok(t, err) Ok(t, err) @@ -132,7 +138,10 @@ func TestNewClient_NoTF(t *testing.T) { // Set PATH to only include our empty directory.
defer tempSetEnv(t, "PATH", tmp)() - _, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true, true, projectCmdOutputHandler) + mockDownloader := mocks.NewMockDownloader() + distribution := terraform.NewDistributionTerraformWithDownloader(mockDownloader) + + _, err := terraform.NewClient(logger, distribution, binDir, cacheDir, "", "", "", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, true, true, projectCmdOutputHandler) ErrEquals(t, "terraform not found in $PATH. Set --default-tf-version or download terraform from https://developer.hashicorp.com/terraform/downloads", err) } @@ -155,7 +164,10 @@ func TestNewClient_DefaultTFFlagInPath(t *testing.T) { Ok(t, err) defer tempSetEnv(t, "PATH", fmt.Sprintf("%s:%s", tmp, os.Getenv("PATH")))() - c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, false, true, projectCmdOutputHandler) + mockDownloader := mocks.NewMockDownloader() + distribution := terraform.NewDistributionTerraformWithDownloader(mockDownloader) + + c, err := terraform.NewClient(logger, distribution, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, false, true, projectCmdOutputHandler) Ok(t, err) Ok(t, err) @@ -183,7 +195,10 @@ func TestNewClient_DefaultTFFlagInBinDir(t *testing.T) { Ok(t, err) defer tempSetEnv(t, "PATH", fmt.Sprintf("%s:%s", tmp, os.Getenv("PATH")))() - c, err := terraform.NewClient(logging.NewNoopLogger(t), binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true, true, projectCmdOutputHandler) + mockDownloader := mocks.NewMockDownloader() + distribution := terraform.NewDistributionTerraformWithDownloader(mockDownloader) + + c, err := terraform.NewClient(logging.NewNoopLogger(t), distribution, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, true, true, projectCmdOutputHandler) Ok(t, err) Ok(t, err) @@ -211,22 +226,19 @@ func TestNewClient_DefaultTFFlagDownload(t *testing.T) { defer tempSetEnv(t, "PATH", "")() mockDownloader := mocks.NewMockDownloader() - When(mockDownloader.GetFile(Any[string](), Any[string]())).Then(func(params []pegomock.Param) pegomock.ReturnValues { - err := os.WriteFile(params[0].(string), []byte("#!/bin/sh\necho '\nTerraform v0.11.10\n'"), 0700) // #nosec G306 - return []pegomock.ReturnValue{err} + When(mockDownloader.Install(Any[context.Context](), Any[string](), Any[string](), Any[*version.Version]())).Then(func(params []Param) ReturnValues { + binPath := filepath.Join(params[1].(string), "terraform0.11.10") + err := os.WriteFile(binPath, []byte("#!/bin/sh\necho '\nTerraform v0.11.10\n'"), 0700) // #nosec G306 + return []ReturnValue{binPath, err} }) - c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, "https://my-mirror.releases.mycompany.com", mockDownloader, true, true, projectCmdOutputHandler) + distribution := terraform.NewDistributionTerraformWithDownloader(mockDownloader) + c, err := terraform.NewClient(logger, distribution, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, true, true, projectCmdOutputHandler) Ok(t, err) Ok(t, err) Equals(t, "0.11.10", c.DefaultVersion().String()) - baseURL := "https://my-mirror.releases.mycompany.com/terraform/0.11.10" - expURL := fmt.Sprintf("%s/terraform_0.11.10_%s_%s.zip?checksum=file:%s/terraform_0.11.10_SHA256SUMS", - baseURL, - 
runtime.GOOS, - runtime.GOARCH, - baseURL) - mockDownloader.VerifyWasCalledEventually(Once(), 2*time.Second).GetFile(filepath.Join(tmp, "bin", "terraform0.11.10"), expURL) + + mockDownloader.VerifyWasCalledEventually(Once(), 2*time.Second).Install(context.Background(), binDir, cmd.DefaultTFDownloadURL, version.Must(version.NewVersion("0.11.10"))) // Reset PATH so that it has sh. Ok(t, os.Setenv("PATH", orig)) @@ -241,7 +253,9 @@ func TestNewClient_BadVersion(t *testing.T) { logger := logging.NewNoopLogger(t) _, binDir, cacheDir := mkSubDirs(t) projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() - _, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "malformed", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true, true, projectCmdOutputHandler) + mockDownloader := mocks.NewMockDownloader() + distribution := terraform.NewDistributionTerraformWithDownloader(mockDownloader) + _, err := terraform.NewClient(logger, distribution, binDir, cacheDir, "", "", "malformed", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, true, true, projectCmdOutputHandler) ErrEquals(t, "Malformed version: malformed", err) } @@ -257,26 +271,22 @@ func TestRunCommandWithVersion_DLsTF(t *testing.T) { RepoRelDir: ".", } + v, err := version.NewVersion("99.99.99") + Ok(t, err) + mockDownloader := mocks.NewMockDownloader() + distribution := terraform.NewDistributionTerraformWithDownloader(mockDownloader) // Set up our mock downloader to write a fake tf binary when it's called. - baseURL := fmt.Sprintf("%s/terraform/99.99.99", cmd.DefaultTFDownloadURL) - expURL := fmt.Sprintf("%s/terraform_99.99.99_%s_%s.zip?checksum=file:%s/terraform_99.99.99_SHA256SUMS", - baseURL, - runtime.GOOS, - runtime.GOARCH, - baseURL) - When(mockDownloader.GetFile(filepath.Join(tmp, "bin", "terraform99.99.99"), expURL)).Then(func(params []pegomock.Param) pegomock.ReturnValues { - err := os.WriteFile(params[0].(string), []byte("#!/bin/sh\necho '\nTerraform v99.99.99\n'"), 0700) // #nosec G306 - return []pegomock.ReturnValue{err} + When(mockDownloader.Install(context.Background(), binDir, cmd.DefaultTFDownloadURL, v)).Then(func(params []Param) ReturnValues { + binPath := filepath.Join(params[1].(string), "terraform99.99.99") + err := os.WriteFile(binPath, []byte("#!/bin/sh\necho '\nTerraform v99.99.99\n'"), 0700) // #nosec G306 + return []ReturnValue{binPath, err} }) - c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, mockDownloader, true, true, projectCmdOutputHandler) + c, err := terraform.NewClient(logger, distribution, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, true, true, projectCmdOutputHandler) Ok(t, err) Equals(t, "0.11.10", c.DefaultVersion().String()) - v, err := version.NewVersion("99.99.99") - Ok(t, err) - output, err := c.RunCommandWithVersion(ctx, tmp, []string{"terraform", "init"}, map[string]string{}, v, "") Assert(t, err == nil, "err: %s: %s", err, output) @@ -287,12 +297,14 @@ func TestRunCommandWithVersion_DLsTF(t *testing.T) { func TestEnsureVersion_downloaded(t *testing.T) { logger := logging.NewNoopLogger(t) RegisterMockTestingT(t) - tmp, binDir, cacheDir := mkSubDirs(t) + _, binDir, cacheDir := mkSubDirs(t) projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() mockDownloader := mocks.NewMockDownloader() + distribution := terraform.NewDistributionTerraformWithDownloader(mockDownloader) + downloadsAllowed := true - c, err :=
terraform.NewTestClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, mockDownloader, downloadsAllowed, true, projectCmdOutputHandler) + c, err := terraform.NewTestClient(logger, distribution, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, downloadsAllowed, true, projectCmdOutputHandler) Ok(t, err) Equals(t, "0.11.10", c.DefaultVersion().String()) @@ -300,17 +312,50 @@ func TestEnsureVersion_downloaded(t *testing.T) { v, err := version.NewVersion("99.99.99") Ok(t, err) + When(mockDownloader.Install(context.Background(), binDir, cmd.DefaultTFDownloadURL, v)).Then(func(params []Param) ReturnValues { + binPath := filepath.Join(params[1].(string), "terraform99.99.99") + err := os.WriteFile(binPath, []byte("#!/bin/sh\necho '\nTerraform v99.99.99\n'"), 0700) // #nosec G306 + return []ReturnValue{binPath, err} + }) + err = c.EnsureVersion(logger, v) Ok(t, err) - baseURL := fmt.Sprintf("%s/terraform/99.99.99", cmd.DefaultTFDownloadURL) - expURL := fmt.Sprintf("%s/terraform_99.99.99_%s_%s.zip?checksum=file:%s/terraform_99.99.99_SHA256SUMS", - baseURL, - runtime.GOOS, - runtime.GOARCH, - baseURL) - mockDownloader.VerifyWasCalledEventually(Once(), 2*time.Second).GetFile(filepath.Join(tmp, "bin", "terraform99.99.99"), expURL) + mockDownloader.VerifyWasCalledEventually(Once(), 2*time.Second).Install(context.Background(), binDir, cmd.DefaultTFDownloadURL, v) +} + +// Test that EnsureVersion downloads terraform from a custom URL. +func TestEnsureVersion_downloaded_customURL(t *testing.T) { + logger := logging.NewNoopLogger(t) + RegisterMockTestingT(t) + _, binDir, cacheDir := mkSubDirs(t) + projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() + + mockDownloader := mocks.NewMockDownloader() + distribution := terraform.NewDistributionTerraformWithDownloader(mockDownloader) + downloadsAllowed := true + customURL := "http://releases.example.com" + + c, err := terraform.NewTestClient(logger, distribution, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, customURL, downloadsAllowed, true, projectCmdOutputHandler) + Ok(t, err) + + Equals(t, "0.11.10", c.DefaultVersion().String()) + + v, err := version.NewVersion("99.99.99") + Ok(t, err) + + When(mockDownloader.Install(context.Background(), binDir, customURL, v)).Then(func(params []Param) ReturnValues { + binPath := filepath.Join(params[1].(string), "terraform99.99.99") + err := os.WriteFile(binPath, []byte("#!/bin/sh\necho '\nTerraform v99.99.99\n'"), 0700) // #nosec G306 + return []ReturnValue{binPath, err} + }) + + err = c.EnsureVersion(logger, v) + + Ok(t, err) + + mockDownloader.VerifyWasCalledEventually(Once(), 2*time.Second).Install(context.Background(), binDir, customURL, v) } // Test that EnsureVersion throws an error when downloads are disabled @@ -321,9 +366,10 @@ func TestEnsureVersion_downloaded_downloadingDisabled(t *testing.T) { projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() mockDownloader := mocks.NewMockDownloader() + distribution := terraform.NewDistributionTerraformWithDownloader(mockDownloader) downloadsAllowed := false - c, err := terraform.NewTestClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, mockDownloader, downloadsAllowed, true,
projectCmdOutputHandler) Ok(t, err) Equals(t, "0.11.10", c.DefaultVersion().String()) @@ -332,7 +378,7 @@ func TestEnsureVersion_downloaded_downloadingDisabled(t *testing.T) { Ok(t, err) err = c.EnsureVersion(logger, v) - ErrContains(t, "Could not find terraform version", err) + ErrContains(t, "could not find terraform version", err) ErrContains(t, "downloads are disabled", err) mockDownloader.VerifyWasCalled(Never()) } @@ -452,9 +498,12 @@ terraform { RegisterMockTestingT(t) _, binDir, cacheDir := mkSubDirs(t) projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() - mockDownloader := mocks.NewMockDownloader() - c, err := terraform.NewTestClient(logger, + distribution := terraform.NewDistributionTerraformWithDownloader(mockDownloader) + + c, err := terraform.NewTestClient( + logger, + distribution, binDir, cacheDir, "", @@ -462,7 +511,6 @@ terraform { "", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, - mockDownloader, downloadsAllowed, true, projectCmdOutputHandler) @@ -491,3 +539,38 @@ terraform { runDetectVersionTestCase(t, name+": Downloads Disabled", testCase, false) } } + +func TestExtractExactRegex(t *testing.T) { + logger := logging.NewNoopLogger(t) + RegisterMockTestingT(t) + _, binDir, cacheDir := mkSubDirs(t) + projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() + mockDownloader := mocks.NewMockDownloader() + distribution := terraform.NewDistributionTerraformWithDownloader(mockDownloader) + + c, err := terraform.NewTestClient(logger, distribution, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, true, true, projectCmdOutputHandler) + Ok(t, err) + + tests := []struct { + version string + want []string + }{ + {"= 1.2.3", []string{"1.2.3"}}, + {"=1.2.3", []string{"1.2.3"}}, + {"1.2.3", []string{"1.2.3"}}, + {"v1.2.3", nil}, + {">= 1.2.3", nil}, + {">=1.2.3", nil}, + {"<= 1.2.3", nil}, + {"<=1.2.3", nil}, + {"~> 1.2.3", nil}, + } + + for _, tt := range tests { + t.Run(tt.version, func(t *testing.T) { + if got := c.ExtractExactRegex(logger, tt.version); !reflect.DeepEqual(got, tt.want) { + t.Errorf("ExtractExactRegex() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/server/events/apply_command_runner.go b/server/events/apply_command_runner.go index fdaf36d337..6c69032910 100644 --- a/server/events/apply_command_runner.go +++ b/server/events/apply_command_runner.go @@ -60,6 +60,7 @@ type ApplyCommandRunner struct { // SilenceVCSStatusNoPlans is whether any plan should set commit status if no projects // are found silenceVCSStatusNoProjects bool + SilencePRComments []string } func (a *ApplyCommandRunner) Run(ctx *command.Context, cmd *CommentCommand) { @@ -77,7 +78,7 @@ func (a *ApplyCommandRunner) Run(ctx *command.Context, cmd *CommentCommand) { if locked { ctx.Log.Info("ignoring apply command since apply disabled globally") - if err := a.vcsClient.CreateComment(baseRepo, pull.Num, applyDisabledComment, command.Apply.String()); err != nil { + if err := a.vcsClient.CreateComment(ctx.Log, baseRepo, pull.Num, applyDisabledComment, command.Apply.String()); err != nil { ctx.Log.Err("unable to comment on pull request: %s", err) } @@ -86,14 +87,14 @@ func (a *ApplyCommandRunner) Run(ctx *command.Context, cmd *CommentCommand) { if a.DisableApplyAll && !cmd.IsForSpecificProject() { ctx.Log.Info("ignoring apply command without flags since apply all is disabled") - if err := a.vcsClient.CreateComment(baseRepo, pull.Num, applyAllDisabledComment, command.Apply.String()); err != nil { + if err := 
a.vcsClient.CreateComment(ctx.Log, baseRepo, pull.Num, applyAllDisabledComment, command.Apply.String()); err != nil { ctx.Log.Err("unable to comment on pull request: %s", err) } return } - if err = a.commitStatusUpdater.UpdateCombined(baseRepo, pull, models.PendingCommitStatus, cmd.CommandName()); err != nil { + if err = a.commitStatusUpdater.UpdateCombined(ctx.Log, baseRepo, pull, models.PendingCommitStatus, cmd.CommandName()); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } @@ -102,7 +103,7 @@ func (a *ApplyCommandRunner) Run(ctx *command.Context, cmd *CommentCommand) { // required the Atlantis status checks to pass, then we've now changed // the mergeability status of the pull request. // This sets the approved, mergeable, and sqlocked status in the context. - ctx.PullRequestStatus, err = a.pullReqStatusFetcher.FetchPullStatus(pull) + ctx.PullRequestStatus, err = a.pullReqStatusFetcher.FetchPullStatus(ctx.Log, pull) if err != nil { // On error we continue the request with mergeable assumed false. // We want to continue because not all apply's will need this status, @@ -115,7 +116,7 @@ func (a *ApplyCommandRunner) Run(ctx *command.Context, cmd *CommentCommand) { projectCmds, err = a.prjCmdBuilder.BuildApplyCommands(ctx, cmd) if err != nil { - if statusErr := a.commitStatusUpdater.UpdateCombined(ctx.Pull.BaseRepo, ctx.Pull, models.FailedCommitStatus, cmd.CommandName()); statusErr != nil { + if statusErr := a.commitStatusUpdater.UpdateCombined(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, models.FailedCommitStatus, cmd.CommandName()); statusErr != nil { ctx.Log.Warn("unable to update commit status: %s", statusErr) } a.pullUpdater.updatePull(ctx, cmd, command.Result{Error: err}) @@ -136,7 +137,7 @@ func (a *ApplyCommandRunner) Run(ctx *command.Context, cmd *CommentCommand) { if pullStatus == nil { // default to 0/0 ctx.Log.Debug("setting VCS status to 0/0 success as no previous state was found") - if err := a.commitStatusUpdater.UpdateCombinedCount(baseRepo, pull, models.SuccessCommitStatus, command.Apply, 0, 0); err != nil { + if err := a.commitStatusUpdater.UpdateCombinedCount(ctx.Log, baseRepo, pull, models.SuccessCommitStatus, command.Apply, 0, 0); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } return @@ -149,7 +150,7 @@ func (a *ApplyCommandRunner) Run(ctx *command.Context, cmd *CommentCommand) { // the Atlantis status to be passing for all pull requests. 
// Does not apply to skipped runs for specific projects ctx.Log.Debug("setting VCS status to success with no projects found") - if err := a.commitStatusUpdater.UpdateCombinedCount(baseRepo, pull, models.SuccessCommitStatus, command.Apply, 0, 0); err != nil { + if err := a.commitStatusUpdater.UpdateCombinedCount(ctx.Log, baseRepo, pull, models.SuccessCommitStatus, command.Apply, 0, 0); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } } @@ -180,7 +181,7 @@ func (a *ApplyCommandRunner) Run(ctx *command.Context, cmd *CommentCommand) { a.updateCommitStatus(ctx, pullStatus) if a.autoMerger.automergeEnabled(projectCmds) && !cmd.AutoMergeDisabled { - a.autoMerger.automerge(ctx, pullStatus, a.autoMerger.deleteSourceBranchOnMergeEnabled(projectCmds)) + a.autoMerger.automerge(ctx, pullStatus, a.autoMerger.deleteSourceBranchOnMergeEnabled(projectCmds), cmd.AutoMergeMethod) } } @@ -211,6 +212,7 @@ func (a *ApplyCommandRunner) updateCommitStatus(ctx *command.Context, pullStatus } if err := a.commitStatusUpdater.UpdateCombinedCount( + ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, status, diff --git a/server/events/apply_command_runner_test.go b/server/events/apply_command_runner_test.go index 9ce45cc261..2258c3d413 100644 --- a/server/events/apply_command_runner_test.go +++ b/server/events/apply_command_runner_test.go @@ -4,7 +4,7 @@ import ( "errors" "testing" - "github.com/google/go-github/v58/github" + "github.com/google/go-github/v65/github" . "github.com/petergtz/pegomock/v4" "github.com/runatlantis/atlantis/server/core/db" "github.com/runatlantis/atlantis/server/core/locking" @@ -57,8 +57,8 @@ func TestApplyCommandRunner_IsLocked(t *testing.T) { State: github.String("open"), } modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(logger, testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(logger, pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) ctx := &command.Context{ User: testdata.User, @@ -72,7 +72,8 @@ func TestApplyCommandRunner_IsLocked(t *testing.T) { When(applyLockChecker.CheckApplyLock()).ThenReturn(locking.ApplyCommandLock{Locked: c.ApplyLocked}, c.ApplyLockError) applyCommandRunner.Run(ctx, &events.CommentCommand{Name: command.Apply}) - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, modelPull.Num, c.ExpComment, "apply") + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull.Num), Eq(c.ExpComment), Eq("apply")) }) } } @@ -191,9 +192,11 @@ func TestApplyCommandRunner_IsSilenced(t *testing.T) { timesComment = 0 } - vcsClient.VerifyWasCalled(Times(timesComment)).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()) + vcsClient.VerifyWasCalled(Times(timesComment)).CreateComment( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) if c.ExpVCSStatusSet { commitUpdater.VerifyWasCalledOnce().UpdateCombinedCount( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Eq[models.CommitStatus](models.SuccessCommitStatus), @@ -203,6 +206,7 @@ func TestApplyCommandRunner_IsSilenced(t *testing.T) { ) } else { 
commitUpdater.VerifyWasCalled(Never()).UpdateCombinedCount( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Any[models.CommitStatus](), @@ -257,7 +261,7 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) { Once(), }, ExpComment: "Ran Apply for 2 projects:\n\n" + - "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " + + "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n---\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " + "2. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### Apply Summary\n\n2 projects, 1 successful, 0 failed, 1 errored", }, { @@ -342,7 +346,7 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) { Never(), }, ExpComment: "Ran Apply for 2 projects:\n\n" + - "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " + + "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n---\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " + "2. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### Apply Summary\n\n2 projects, 1 successful, 0 failed, 1 errored", }, { @@ -395,7 +399,7 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) { Once(), }, ExpComment: "Ran Apply for 4 projects:\n\n" + - "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " + + "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n---\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " + "2. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " + "3. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### " + "4. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### Apply Summary\n\n4 projects, 3 successful, 0 failed, 1 errored", @@ -429,7 +433,7 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) { Once(), }, ExpComment: "Ran Apply for 2 projects:\n\n" + - "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n\n### 1. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### " + + "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n---\n\n### 1. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### " + "2. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### Apply Summary\n\n2 projects, 1 successful, 0 failed, 1 errored", }, { @@ -459,7 +463,7 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) { Once(), }, ExpComment: "Ran Apply for 2 projects:\n\n" + - "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n\n### 1. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### " + + "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n---\n\n### 1. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### " + "2. 
dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### Apply Summary\n\n2 projects, 1 successful, 0 failed, 1 errored", }, } @@ -486,8 +490,8 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) { Trigger: command.CommentTrigger, } - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(logger, testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(logger, pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) When(projectCommandBuilder.BuildApplyCommands(ctx, cmd)).ThenReturn(c.ProjectContexts, nil) for i := range c.ProjectContexts { @@ -502,7 +506,7 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) { } vcsClient.VerifyWasCalledOnce().CreateComment( - testdata.GithubRepo, modelPull.Num, c.ExpComment, "apply", + Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull.Num), Eq(c.ExpComment), Eq("apply"), ) }) } diff --git a/server/events/approve_policies_command_runner.go b/server/events/approve_policies_command_runner.go index 6deefd242f..c1a4fef9cc 100644 --- a/server/events/approve_policies_command_runner.go +++ b/server/events/approve_policies_command_runner.go @@ -45,13 +45,13 @@ func (a *ApprovePoliciesCommandRunner) Run(ctx *command.Context, cmd *CommentCom baseRepo := ctx.Pull.BaseRepo pull := ctx.Pull - if err := a.commitStatusUpdater.UpdateCombined(baseRepo, pull, models.PendingCommitStatus, command.PolicyCheck); err != nil { + if err := a.commitStatusUpdater.UpdateCombined(ctx.Log, baseRepo, pull, models.PendingCommitStatus, command.PolicyCheck); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } projectCmds, err := a.prjCmdBuilder.BuildApprovePoliciesCommands(ctx, cmd) if err != nil { - if statusErr := a.commitStatusUpdater.UpdateCombined(ctx.Pull.BaseRepo, ctx.Pull, models.FailedCommitStatus, command.PolicyCheck); statusErr != nil { + if statusErr := a.commitStatusUpdater.UpdateCombined(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, models.FailedCommitStatus, command.PolicyCheck); statusErr != nil { ctx.Log.Warn("unable to update commit status: %s", statusErr) } a.pullUpdater.updatePull(ctx, cmd, command.Result{Error: err}) @@ -65,7 +65,7 @@ func (a *ApprovePoliciesCommandRunner) Run(ctx *command.Context, cmd *CommentCom // with 0/0 projects approve_policies successfully because some users require // the Atlantis status to be passing for all pull requests. 
ctx.Log.Debug("setting VCS status to success with no projects found") - if err := a.commitStatusUpdater.UpdateCombinedCount(ctx.Pull.BaseRepo, ctx.Pull, models.SuccessCommitStatus, command.PolicyCheck, 0, 0); err != nil { + if err := a.commitStatusUpdater.UpdateCombinedCount(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, models.SuccessCommitStatus, command.PolicyCheck, 0, 0); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } } @@ -101,7 +101,7 @@ func (a *ApprovePoliciesCommandRunner) updateCommitStatus(ctx *command.Context, status = models.FailedCommitStatus } - if err := a.commitStatusUpdater.UpdateCombinedCount(ctx.Pull.BaseRepo, ctx.Pull, status, command.PolicyCheck, numSuccess, len(pullStatus.Projects)); err != nil { + if err := a.commitStatusUpdater.UpdateCombinedCount(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, status, command.PolicyCheck, numSuccess, len(pullStatus.Projects)); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } } diff --git a/server/events/automerger.go b/server/events/automerger.go index b3c05e75e8..1d19964076 100644 --- a/server/events/automerger.go +++ b/server/events/automerger.go @@ -13,7 +13,7 @@ type AutoMerger struct { GlobalAutomerge bool } -func (c *AutoMerger) automerge(ctx *command.Context, pullStatus models.PullStatus, deleteSourceBranchOnMerge bool) { +func (c *AutoMerger) automerge(ctx *command.Context, pullStatus models.PullStatus, deleteSourceBranchOnMerge bool, mergeMethod string) { // We only automerge if all projects have been successfully applied. for _, p := range pullStatus.Projects { if p.Status != models.AppliedPlanStatus { @@ -23,7 +23,7 @@ func (c *AutoMerger) automerge(ctx *command.Context, pullStatus models.PullStatu } // Comment that we're automerging the pull request. - if err := c.VCSClient.CreateComment(ctx.Pull.BaseRepo, ctx.Pull.Num, automergeComment, command.Apply.String()); err != nil { + if err := c.VCSClient.CreateComment(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull.Num, automergeComment, command.Apply.String()); err != nil { ctx.Log.Err("failed to comment about automerge: %s", err) // Commenting isn't required so continue. 
} @@ -32,13 +32,14 @@ func (c *AutoMerger) automerge(ctx *command.Context, pullStatus models.PullStatu ctx.Log.Info("automerging pull request") var pullOptions models.PullRequestOptions pullOptions.DeleteSourceBranchOnMerge = deleteSourceBranchOnMerge - err := c.VCSClient.MergePull(ctx.Pull, pullOptions) + pullOptions.MergeMethod = mergeMethod + err := c.VCSClient.MergePull(ctx.Log, ctx.Pull, pullOptions) if err != nil { ctx.Log.Err("automerging failed: %s", err) failureComment := fmt.Sprintf("Automerging failed:\n```\n%s\n```", err) - if commentErr := c.VCSClient.CreateComment(ctx.Pull.BaseRepo, ctx.Pull.Num, failureComment, command.Apply.String()); commentErr != nil { + if commentErr := c.VCSClient.CreateComment(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull.Num, failureComment, command.Apply.String()); commentErr != nil { ctx.Log.Err("failed to comment about automerge failing: %s", err) } } diff --git a/server/events/command/context.go b/server/events/command/context.go index 1d6748915c..fb69e917b6 100644 --- a/server/events/command/context.go +++ b/server/events/command/context.go @@ -43,4 +43,10 @@ type Context struct { ClearPolicyApproval bool Trigger Trigger + + // API is true if plan/apply by API endpoints + API bool + + // TeamAllowlistChecker is used to check authorization on a project-level + TeamAllowlistChecker TeamAllowlistChecker } diff --git a/server/events/command/name_test.go b/server/events/command/name_test.go index c45bba1db7..8d91941b46 100644 --- a/server/events/command/name_test.go +++ b/server/events/command/name_test.go @@ -8,6 +8,7 @@ import ( "github.com/runatlantis/atlantis/server/events/command" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestName_TitleString(t *testing.T) { @@ -182,7 +183,7 @@ func TestParseCommandName(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := command.ParseCommandName(tt.name) - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, tt.exp, got) }) } diff --git a/server/events/command/project_context.go b/server/events/command/project_context.go index c06681ef82..5490b4a41e 100644 --- a/server/events/command/project_context.go +++ b/server/events/command/project_context.go @@ -113,8 +113,8 @@ type ProjectContext struct { ClearPolicyApproval bool // DeleteSourceBranchOnMerge will attempt to allow a branch to be deleted when merged (AzureDevOps & GitLab Support Only) DeleteSourceBranchOnMerge bool - // RepoLocking will get a lock when plan - RepoLocking bool + // Repo locks mode: disabled, on plan or on apply + RepoLocksMode valid.RepoLocksMode // RepoConfigFile RepoConfigFile string // UUID for atlantis logs @@ -125,6 +125,10 @@ type ProjectContext struct { AbortOnExcecutionOrderFail bool // Allows custom policy check tools outside of Conftest to run in checks CustomPolicyCheck bool + SilencePRComments []string + + // TeamAllowlistChecker is used to check authorization on a project-level + TeamAllowlistChecker TeamAllowlistChecker } // SetProjectScopeTags adds ProjectContext tags to a new returned scope. 
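Context and ProjectContext above now carry a TeamAllowlistChecker, and the rename further below moves the checker itself into the command package behind an interface. The rule format is unchanged: a comma-separated list of team:command pairs, with * as a wildcard on either side and case-insensitive matching via strings.EqualFold. A standalone sketch of just that matching, simplified from the checker code below (the real checker pre-parses rules once in NewTeamAllowlistChecker):

```go
package main

import (
	"fmt"
	"strings"
)

// allowed reports whether a "team:command" rule list permits the command.
func allowed(rules, team, command string) bool {
	for _, pair := range strings.Split(rules, ",") {
		parts := strings.SplitN(strings.TrimSpace(pair), ":", 2)
		if len(parts) != 2 {
			continue
		}
		teamOK := parts[0] == "*" || strings.EqualFold(parts[0], team)
		commandOK := parts[1] == "*" || strings.EqualFold(parts[1], command)
		if teamOK && commandOK {
			return true
		}
	}
	return false
}

func main() {
	rules := "bob:plan, dave:apply, *:unlock"
	fmt.Println(allowed(rules, "bob", "plan"))    // true
	fmt.Println(allowed(rules, "bob", "apply"))   // false
	fmt.Println(allowed(rules, "DAVE", "unlock")) // true: wildcard team rule
}
```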
diff --git a/server/events/command/project_result.go b/server/events/command/project_result.go index 0d59c4e9ab..8f72f1d168 100644 --- a/server/events/command/project_result.go +++ b/server/events/command/project_result.go @@ -19,6 +19,7 @@ type ProjectResult struct { ImportSuccess *models.ImportSuccess StateRmSuccess *models.StateRmSuccess ProjectName string + SilencePRComments []string } // CommitStatus returns the vcs commit status of this project result. diff --git a/server/events/team_allowlist_checker.go b/server/events/command/team_allowlist_checker.go similarity index 53% rename from server/events/team_allowlist_checker.go rename to server/events/command/team_allowlist_checker.go index 01e7bed73c..5c58873650 100644 --- a/server/events/team_allowlist_checker.go +++ b/server/events/command/team_allowlist_checker.go @@ -1,7 +1,9 @@ -package events +package command import ( "strings" + + "github.com/runatlantis/atlantis/server/events/models" ) // Wildcard matches all teams and all commands @@ -10,14 +12,25 @@ const wildcard = "*" // mapOfStrings is an alias for map[string]string type mapOfStrings map[string]string -// TeamAllowlistChecker implements checking the teams and the operations that the members +type TeamAllowlistChecker interface { + // HasRules returns true if the checker has rules defined + HasRules() bool + + // IsCommandAllowedForTeam determines if the specified team can perform the specified action + IsCommandAllowedForTeam(ctx models.TeamAllowlistCheckerContext, team, command string) bool + + // IsCommandAllowedForAnyTeam determines if any of the specified teams can perform the specified action + IsCommandAllowedForAnyTeam(ctx models.TeamAllowlistCheckerContext, teams []string, command string) bool +} + +// DefaultTeamAllowlistChecker implements checking the teams and the operations that the members // of a particular team are allowed to perform -type TeamAllowlistChecker struct { +type DefaultTeamAllowlistChecker struct { rules []mapOfStrings } // NewTeamAllowlistChecker constructs a new checker -func NewTeamAllowlistChecker(allowlist string) (*TeamAllowlistChecker, error) { +func NewTeamAllowlistChecker(allowlist string) (*DefaultTeamAllowlistChecker, error) { var rules []mapOfStrings pairs := strings.Split(allowlist, ",") if pairs[0] != "" { @@ -29,18 +42,18 @@ func NewTeamAllowlistChecker(allowlist string) (*TeamAllowlistChecker, error) { rules = append(rules, m) } } - return &TeamAllowlistChecker{ + return &DefaultTeamAllowlistChecker{ rules: rules, }, nil } -func (checker *TeamAllowlistChecker) HasRules() bool { +func (checker *DefaultTeamAllowlistChecker) HasRules() bool { return len(checker.rules) > 0 } // IsCommandAllowedForTeam returns true if the team is allowed to execute the command // and false otherwise. -func (checker *TeamAllowlistChecker) IsCommandAllowedForTeam(team string, command string) bool { +func (checker *DefaultTeamAllowlistChecker) IsCommandAllowedForTeam(_ models.TeamAllowlistCheckerContext, team string, command string) bool { for _, rule := range checker.rules { for key, value := range rule { if (key == wildcard || strings.EqualFold(key, team)) && (value == wildcard || strings.EqualFold(value, command)) { @@ -53,7 +66,7 @@ func (checker *TeamAllowlistChecker) IsCommandAllowedForTeam(team string, comman // IsCommandAllowedForAnyTeam returns true if any of the teams is allowed to execute the command // and false otherwise. 
-func (checker *TeamAllowlistChecker) IsCommandAllowedForAnyTeam(teams []string, command string) bool { +func (checker *DefaultTeamAllowlistChecker) IsCommandAllowedForAnyTeam(ctx models.TeamAllowlistCheckerContext, teams []string, command string) bool { if len(teams) == 0 { for _, rule := range checker.rules { for key, value := range rule { @@ -64,7 +77,7 @@ func (checker *TeamAllowlistChecker) IsCommandAllowedForAnyTeam(teams []string, } } else { for _, t := range teams { - if checker.IsCommandAllowedForTeam(t, command) { + if checker.IsCommandAllowedForTeam(ctx, t, command) { return true } } diff --git a/server/events/command/team_allowlist_checker_test.go b/server/events/command/team_allowlist_checker_test.go new file mode 100644 index 0000000000..ddbe402003 --- /dev/null +++ b/server/events/command/team_allowlist_checker_test.go @@ -0,0 +1,45 @@ +package command_test + +import ( + "testing" + + "github.com/runatlantis/atlantis/server/events/command" + "github.com/runatlantis/atlantis/server/events/models" + + . "github.com/runatlantis/atlantis/testing" +) + +func TestNewTeamAllowListChecker(t *testing.T) { + allowlist := `bob:plan, dave:apply` + _, err := command.NewTeamAllowlistChecker(allowlist) + Ok(t, err) +} + +func TestNewTeamAllowListCheckerEmpty(t *testing.T) { + allowlist := `` + checker, err := command.NewTeamAllowlistChecker(allowlist) + Ok(t, err) + Equals(t, false, checker.HasRules()) +} + +func TestIsCommandAllowedForTeam(t *testing.T) { + allowlist := `bob:plan, dave:apply, connie:plan, connie:apply` + checker, err := command.NewTeamAllowlistChecker(allowlist) + Ok(t, err) + Equals(t, true, checker.IsCommandAllowedForTeam(models.TeamAllowlistCheckerContext{}, "connie", "plan")) + Equals(t, true, checker.IsCommandAllowedForTeam(models.TeamAllowlistCheckerContext{}, "connie", "apply")) + Equals(t, true, checker.IsCommandAllowedForTeam(models.TeamAllowlistCheckerContext{}, "dave", "apply")) + Equals(t, true, checker.IsCommandAllowedForTeam(models.TeamAllowlistCheckerContext{}, "bob", "plan")) + Equals(t, false, checker.IsCommandAllowedForTeam(models.TeamAllowlistCheckerContext{}, "bob", "apply")) +} + +func TestIsCommandAllowedForAnyTeam(t *testing.T) { + allowlist := `alpha:plan,beta:release,*:unlock,nobody:*` + teams := []string{`alpha`, `beta`} + checker, err := command.NewTeamAllowlistChecker(allowlist) + Ok(t, err) + Equals(t, true, checker.IsCommandAllowedForAnyTeam(models.TeamAllowlistCheckerContext{}, teams, `plan`)) + Equals(t, true, checker.IsCommandAllowedForAnyTeam(models.TeamAllowlistCheckerContext{}, teams, `release`)) + Equals(t, true, checker.IsCommandAllowedForAnyTeam(models.TeamAllowlistCheckerContext{}, teams, `unlock`)) + Equals(t, false, checker.IsCommandAllowedForAnyTeam(models.TeamAllowlistCheckerContext{}, teams, `noop`)) +} diff --git a/server/events/command_requirement_handler.go b/server/events/command_requirement_handler.go index 5c7b1c1d54..bf95a255ce 100644 --- a/server/events/command_requirement_handler.go +++ b/server/events/command_requirement_handler.go @@ -33,7 +33,7 @@ func (a *DefaultCommandRequirementHandler) ValidatePlanProject(repoDir string, c return "Pull request must be mergeable before running plan.", nil } case raw.UnDivergedRequirement: - if a.WorkingDir.HasDiverged(repoDir) { + if a.WorkingDir.HasDiverged(ctx.Log, repoDir) { return "Default branch must be rebased onto pull request before running plan.", nil } } @@ -60,7 +60,7 @@ func (a *DefaultCommandRequirementHandler) ValidateApplyProject(repoDir string, return "Pull 
request must be mergeable before running apply.", nil } case raw.UnDivergedRequirement: - if a.WorkingDir.HasDiverged(repoDir) { + if a.WorkingDir.HasDiverged(ctx.Log, repoDir) { return "Default branch must be rebased onto pull request before running apply.", nil } } @@ -95,7 +95,7 @@ func (a *DefaultCommandRequirementHandler) ValidateImportProject(repoDir string, return "Pull request must be mergeable before running import.", nil } case raw.UnDivergedRequirement: - if a.WorkingDir.HasDiverged(repoDir) { + if a.WorkingDir.HasDiverged(ctx.Log, repoDir) { return "Default branch must be rebased onto pull request before running import.", nil } } diff --git a/server/events/command_requirement_handler_test.go b/server/events/command_requirement_handler_test.go index 1c737f05aa..149e3a608b 100644 --- a/server/events/command_requirement_handler_test.go +++ b/server/events/command_requirement_handler_test.go @@ -9,6 +9,7 @@ import ( "github.com/runatlantis/atlantis/server/core/config/valid" "github.com/runatlantis/atlantis/server/events" "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/mocks" @@ -46,7 +47,7 @@ func TestAggregateApplyRequirements_ValidatePlanProject(t *testing.T) { ProjectPlanStatus: models.PassedPolicyCheckStatus, }, setup: func(workingDir *mocks.MockWorkingDir) { - When(workingDir.HasDiverged(Any[string]())).ThenReturn(false) + When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(false) }, wantErr: assert.NoError, }, @@ -76,7 +77,7 @@ func TestAggregateApplyRequirements_ValidatePlanProject(t *testing.T) { PlanRequirements: []string{raw.UnDivergedRequirement}, }, setup: func(workingDir *mocks.MockWorkingDir) { - When(workingDir.HasDiverged(Any[string]())).ThenReturn(true) + When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(true) }, wantFailure: "Default branch must be rebased onto pull request before running plan.", wantErr: assert.NoError, @@ -130,7 +131,7 @@ func TestAggregateApplyRequirements_ValidateApplyProject(t *testing.T) { ProjectPlanStatus: models.PassedPolicyCheckStatus, }, setup: func(workingDir *mocks.MockWorkingDir) { - When(workingDir.HasDiverged(Any[string]())).ThenReturn(false) + When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(false) }, wantErr: assert.NoError, }, @@ -184,7 +185,7 @@ func TestAggregateApplyRequirements_ValidateApplyProject(t *testing.T) { ApplyRequirements: []string{raw.UnDivergedRequirement}, }, setup: func(workingDir *mocks.MockWorkingDir) { - When(workingDir.HasDiverged(Any[string]())).ThenReturn(true) + When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(true) }, wantFailure: "Default branch must be rebased onto pull request before running apply.", wantErr: assert.NoError, @@ -363,7 +364,7 @@ func TestAggregateApplyRequirements_ValidateImportProject(t *testing.T) { ProjectPlanStatus: models.PassedPolicyCheckStatus, }, setup: func(workingDir *mocks.MockWorkingDir) { - When(workingDir.HasDiverged(Any[string]())).ThenReturn(false) + When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(false) }, wantErr: assert.NoError, }, @@ -393,7 +394,7 @@ func TestAggregateApplyRequirements_ValidateImportProject(t *testing.T) { ImportRequirements: []string{raw.UnDivergedRequirement}, }, setup: func(workingDir *mocks.MockWorkingDir) { - 
When(workingDir.HasDiverged(Any[string]())).ThenReturn(true) + When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(true) }, wantFailure: "Default branch must be rebased onto pull request before running import.", wantErr: assert.NoError, diff --git a/server/events/command_runner.go b/server/events/command_runner.go index 4bf9c0e653..a56ecf67c2 100644 --- a/server/events/command_runner.go +++ b/server/events/command_runner.go @@ -17,13 +17,14 @@ import ( "fmt" "strconv" - "github.com/google/go-github/v58/github" + "github.com/google/go-github/v65/github" "github.com/mcdafydd/go-azuredevops/azuredevops" "github.com/pkg/errors" "github.com/runatlantis/atlantis/server/core/config/valid" "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/vcs" + "github.com/runatlantis/atlantis/server/events/vcs/gitea" "github.com/runatlantis/atlantis/server/logging" "github.com/runatlantis/atlantis/server/metrics" "github.com/runatlantis/atlantis/server/recovery" @@ -36,7 +37,7 @@ const ( ShutdownComment = "Atlantis server is shutting down, please try again later." ) -//go:generate pegomock generate --package mocks -o mocks/mock_command_runner.go CommandRunner +//go:generate pegomock generate github.com/runatlantis/atlantis/server/events --package mocks -o mocks/mock_command_runner.go CommandRunner // CommandRunner is the first step after a command request has been parsed. type CommandRunner interface { @@ -47,28 +48,28 @@ type CommandRunner interface { RunAutoplanCommand(baseRepo models.Repo, headRepo models.Repo, pull models.PullRequest, user models.User) } -//go:generate pegomock generate --package mocks -o mocks/mock_github_pull_getter.go GithubPullGetter +//go:generate pegomock generate github.com/runatlantis/atlantis/server/events --package mocks -o mocks/mock_github_pull_getter.go GithubPullGetter // GithubPullGetter makes API calls to get pull requests. type GithubPullGetter interface { // GetPullRequest gets the pull request with id pullNum for the repo. - GetPullRequest(repo models.Repo, pullNum int) (*github.PullRequest, error) + GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) (*github.PullRequest, error) } -//go:generate pegomock generate --package mocks -o mocks/mock_azuredevops_pull_getter.go AzureDevopsPullGetter +//go:generate pegomock generate github.com/runatlantis/atlantis/server/events --package mocks -o mocks/mock_azuredevops_pull_getter.go AzureDevopsPullGetter // AzureDevopsPullGetter makes API calls to get pull requests. type AzureDevopsPullGetter interface { // GetPullRequest gets the pull request with id pullNum for the repo. - GetPullRequest(repo models.Repo, pullNum int) (*azuredevops.GitPullRequest, error) + GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) (*azuredevops.GitPullRequest, error) } -//go:generate pegomock generate --package mocks -o mocks/mock_gitlab_merge_request_getter.go GitlabMergeRequestGetter +//go:generate pegomock generate github.com/runatlantis/atlantis/server/events --package mocks -o mocks/mock_gitlab_merge_request_getter.go GitlabMergeRequestGetter // GitlabMergeRequestGetter makes API calls to get merge requests. type GitlabMergeRequestGetter interface { // GetMergeRequest gets the pull request with the id pullNum for the repo. 
- GetMergeRequest(repoFullName string, pullNum int) (*gitlab.MergeRequest, error) + GetMergeRequest(logger logging.SimpleLogging, repoFullName string, pullNum int) (*gitlab.MergeRequest, error) } // CommentCommandRunner runs individual command workflows. @@ -97,6 +98,7 @@ type DefaultCommandRunner struct { GithubPullGetter GithubPullGetter AzureDevopsPullGetter AzureDevopsPullGetter GitlabMergeRequestGetter GitlabMergeRequestGetter + GiteaPullGetter *gitea.GiteaClient // User config option: Disables autoplan when a pull request is opened or updated. DisableAutoplan bool DisableAutoplanLabel string @@ -126,7 +128,7 @@ type DefaultCommandRunner struct { PreWorkflowHooksCommandRunner PreWorkflowHooksCommandRunner PostWorkflowHooksCommandRunner PostWorkflowHooksCommandRunner PullStatusFetcher PullStatusFetcher - TeamAllowlistChecker *TeamAllowlistChecker + TeamAllowlistChecker command.TeamAllowlistChecker VarFileAllowlistChecker *VarFileAllowlistChecker CommitStatusUpdater CommitStatusUpdater } @@ -134,7 +136,7 @@ type DefaultCommandRunner struct { // RunAutoplanCommand runs plan and policy_checks when a pull request is opened or updated. func (c *DefaultCommandRunner) RunAutoplanCommand(baseRepo models.Repo, headRepo models.Repo, pull models.PullRequest, user models.User) { if opStarted := c.Drainer.StartOp(); !opStarted { - if commentErr := c.VCSClient.CreateComment(baseRepo, pull.Num, ShutdownComment, command.Plan.String()); commentErr != nil { + if commentErr := c.VCSClient.CreateComment(c.Logger, baseRepo, pull.Num, ShutdownComment, command.Plan.String()); commentErr != nil { c.Logger.Log(logging.Error, "unable to comment that Atlantis is shutting down: %s", commentErr) } return @@ -154,13 +156,21 @@ func (c *DefaultCommandRunner) RunAutoplanCommand(baseRepo models.Repo, headRepo defer timer.Stop() // Check if the user who triggered the autoplan has permissions to run 'plan'. - ok, err := c.checkUserPermissions(baseRepo, user, "plan") - if err != nil { - c.Logger.Err("Unable to check user permissions: %s", err) - return - } - if !ok { - return + if c.TeamAllowlistChecker != nil && c.TeamAllowlistChecker.HasRules() { + err := c.fetchUserTeams(baseRepo, &user) + if err != nil { + c.Logger.Err("Unable to fetch user teams: %s", err) + return + } + + ok, err := c.checkUserPermissions(baseRepo, user, "plan") + if err != nil { + c.Logger.Err("Unable to check user permissions: %s", err) + return + } + if !ok { + return + } } ctx := &command.Context{ @@ -179,7 +189,7 @@ func (c *DefaultCommandRunner) RunAutoplanCommand(baseRepo models.Repo, headRepo return } if len(c.DisableAutoplanLabel) > 0 { - labels, err := c.VCSClient.GetPullLabels(baseRepo, pull) + labels, err := c.VCSClient.GetPullLabels(ctx.Log, baseRepo, pull) if err != nil { ctx.Log.Err("Unable to get pull labels. Proceeding with %s command.", err, command.Plan) } else if utils.SlicesContains(labels, c.DisableAutoplanLabel) { @@ -201,11 +211,11 @@ func (c *DefaultCommandRunner) RunAutoplanCommand(baseRepo models.Repo, headRepo // Update the plan or apply commit status to pending whilst the pre workflow hook is running so that the PR can't be merged. 
switch cmd.Name { case command.Plan: - if err := c.CommitStatusUpdater.UpdateCombined(ctx.Pull.BaseRepo, ctx.Pull, models.FailedCommitStatus, command.Plan); err != nil { + if err := c.CommitStatusUpdater.UpdateCombined(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, models.FailedCommitStatus, command.Plan); err != nil { ctx.Log.Warn("unable to update plan commit status: %s", err) } case command.Apply: - if err := c.CommitStatusUpdater.UpdateCombined(ctx.Pull.BaseRepo, ctx.Pull, models.FailedCommitStatus, command.Apply); err != nil { + if err := c.CommitStatusUpdater.UpdateCombined(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, models.FailedCommitStatus, command.Apply); err != nil { ctx.Log.Warn("unable to update apply commit status: %s", err) } } @@ -231,7 +241,7 @@ func (c *DefaultCommandRunner) RunAutoplanCommand(baseRepo models.Repo, headRepo // is not allowed to execute the command. func (c *DefaultCommandRunner) commentUserDoesNotHavePermissions(baseRepo models.Repo, pullNum int, user models.User, cmd *CommentCommand) { errMsg := fmt.Sprintf("```\nError: User @%s does not have permissions to execute '%s' command.\n```", user.Username, cmd.Name.String()) - if err := c.VCSClient.CreateComment(baseRepo, pullNum, errMsg, ""); err != nil { + if err := c.VCSClient.CreateComment(c.Logger, baseRepo, pullNum, errMsg, ""); err != nil { c.Logger.Err("unable to comment on pull request: %s", err) } } @@ -242,11 +252,16 @@ func (c *DefaultCommandRunner) checkUserPermissions(repo models.Repo, user model // allowlist restriction is not enabled return true, nil } - teams, err := c.VCSClient.GetTeamNamesForUser(repo, user) - if err != nil { - return false, err + ctx := models.TeamAllowlistCheckerContext{ + BaseRepo: repo, + CommandName: cmdName, + Log: c.Logger, + Pull: models.PullRequest{}, + User: user, + Verbose: false, + API: false, } - ok := c.TeamAllowlistChecker.IsCommandAllowedForAnyTeam(teams, cmdName) + ok := c.TeamAllowlistChecker.IsCommandAllowedForAnyTeam(ctx, user.Teams, cmdName) if !ok { return false, nil } @@ -269,7 +284,7 @@ func (c *DefaultCommandRunner) checkVarFilesInPlanCommandAllowlisted(cmd *Commen // wasteful) call to get the necessary data. 
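Both RunAutoplanCommand above and RunCommentCommand below now populate user.Teams via c.fetchUserTeams before checking permissions, instead of fetching team names inside checkUserPermissions as the removed code did. The helper's body is outside this hunk; assuming the pre-existing VCSClient.GetTeamNamesForUser signature and the surrounding file's imports, a plausible sketch is:

```go
// Plausible sketch only: fetchUserTeams is referenced in this patch but its
// body is not shown. It presumably resolves the user's team names once and
// caches them on the user so checkUserPermissions can read user.Teams.
func (c *DefaultCommandRunner) fetchUserTeams(repo models.Repo, user *models.User) error {
	teams, err := c.VCSClient.GetTeamNamesForUser(repo, *user)
	if err != nil {
		return err
	}
	user.Teams = teams
	return nil
}
```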
func (c *DefaultCommandRunner) RunCommentCommand(baseRepo models.Repo, maybeHeadRepo *models.Repo, maybePull *models.PullRequest, user models.User, pullNum int, cmd *CommentCommand) { if opStarted := c.Drainer.StartOp(); !opStarted { - if commentErr := c.VCSClient.CreateComment(baseRepo, pullNum, ShutdownComment, ""); commentErr != nil { + if commentErr := c.VCSClient.CreateComment(c.Logger, baseRepo, pullNum, ShutdownComment, ""); commentErr != nil { c.Logger.Log(logging.Error, "unable to comment that Atlantis is shutting down: %s", commentErr) } return @@ -288,20 +303,28 @@ func (c *DefaultCommandRunner) RunCommentCommand(baseRepo models.Repo, maybeHead defer timer.Stop() // Check if the user who commented has the permissions to execute the 'plan' or 'apply' commands - ok, err := c.checkUserPermissions(baseRepo, user, cmd.Name.String()) - if err != nil { - c.Logger.Err("Unable to check user permissions: %s", err) - return - } - if !ok { - c.commentUserDoesNotHavePermissions(baseRepo, pullNum, user, cmd) - return + if c.TeamAllowlistChecker != nil && c.TeamAllowlistChecker.HasRules() { + err := c.fetchUserTeams(baseRepo, &user) + if err != nil { + c.Logger.Err("Unable to fetch user teams: %s", err) + return + } + + ok, err := c.checkUserPermissions(baseRepo, user, cmd.Name.String()) + if err != nil { + c.Logger.Err("Unable to check user permissions: %s", err) + return + } + if !ok { + c.commentUserDoesNotHavePermissions(baseRepo, pullNum, user, cmd) + return + } } // Check if the provided var files in a 'plan' command are allowlisted if err := c.checkVarFilesInPlanCommandAllowlisted(cmd); err != nil { errMsg := fmt.Sprintf("```\n%s\n```", err.Error()) - if commentErr := c.VCSClient.CreateComment(baseRepo, pullNum, errMsg, ""); commentErr != nil { + if commentErr := c.VCSClient.CreateComment(c.Logger, baseRepo, pullNum, errMsg, ""); commentErr != nil { c.Logger.Err("unable to comment on pull request: %s", commentErr) } return @@ -319,15 +342,16 @@ func (c *DefaultCommandRunner) RunCommentCommand(baseRepo models.Repo, maybeHead } ctx := &command.Context{ - User: user, - Log: log, - Pull: pull, - PullStatus: status, - HeadRepo: headRepo, - Scope: scope, - Trigger: command.CommentTrigger, - PolicySet: cmd.PolicySet, - ClearPolicyApproval: cmd.ClearPolicyApproval, + User: user, + Log: log, + Pull: pull, + PullStatus: status, + HeadRepo: headRepo, + Scope: scope, + Trigger: command.CommentTrigger, + PolicySet: cmd.PolicySet, + ClearPolicyApproval: cmd.ClearPolicyApproval, + TeamAllowlistChecker: c.TeamAllowlistChecker, } if !c.validateCtxAndComment(ctx, cmd.Name) { @@ -345,11 +369,11 @@ func (c *DefaultCommandRunner) RunCommentCommand(baseRepo models.Repo, maybeHead // Update the plan or apply commit status to pending whilst the pre workflow hook is running so that the PR can't be merged. 
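The most repetitive change in this file is mechanical: VCS-client and status-updater methods now take a logging.SimpleLogging as their first parameter so API calls are logged against the pull request's own context. Where no command context exists yet (the drainer's shutdown comment, metadata-fetch failures) the code passes the server-wide c.Logger; once a command.Context is built, it passes ctx.Log. The shape of the change, inferred from the call sites here (a fragment, not the complete interface in server/events/vcs):

    package vcs

    import (
        "github.com/runatlantis/atlantis/server/events/models"
        "github.com/runatlantis/atlantis/server/logging"
    )

    // Inferred fragment: these signatures match this diff's call sites;
    // the real Client interface declares many more methods.
    type Client interface {
        CreateComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, comment string, command string) error
        GetPullLabels(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error)
        // GetTeamNamesForUser keeps its old shape in these hunks.
        GetTeamNamesForUser(repo models.Repo, user models.User) ([]string, error)
    }

The status-update switch that follows mirrors the autoplan path, logger included.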
switch cmd.Name { case command.Plan: - if err := c.CommitStatusUpdater.UpdateCombined(ctx.Pull.BaseRepo, ctx.Pull, models.FailedCommitStatus, command.Plan); err != nil { + if err := c.CommitStatusUpdater.UpdateCombined(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, models.FailedCommitStatus, command.Plan); err != nil { ctx.Log.Warn("unable to update plan commit status: %s", err) } case command.Apply: - if err := c.CommitStatusUpdater.UpdateCombined(ctx.Pull.BaseRepo, ctx.Pull, models.FailedCommitStatus, command.Apply); err != nil { + if err := c.CommitStatusUpdater.UpdateCombined(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, models.FailedCommitStatus, command.Apply); err != nil { ctx.Log.Warn("unable to update apply commit status: %s", err) } } @@ -371,26 +395,41 @@ func (c *DefaultCommandRunner) RunCommentCommand(baseRepo models.Repo, maybeHead } } -func (c *DefaultCommandRunner) getGithubData(baseRepo models.Repo, pullNum int) (models.PullRequest, models.Repo, error) { +func (c *DefaultCommandRunner) getGithubData(logger logging.SimpleLogging, baseRepo models.Repo, pullNum int) (models.PullRequest, models.Repo, error) { if c.GithubPullGetter == nil { return models.PullRequest{}, models.Repo{}, errors.New("Atlantis not configured to support GitHub") } - ghPull, err := c.GithubPullGetter.GetPullRequest(baseRepo, pullNum) + ghPull, err := c.GithubPullGetter.GetPullRequest(logger, baseRepo, pullNum) if err != nil { return models.PullRequest{}, models.Repo{}, errors.Wrap(err, "making pull request API call to GitHub") } - pull, _, headRepo, err := c.EventParser.ParseGithubPull(ghPull) + pull, _, headRepo, err := c.EventParser.ParseGithubPull(logger, ghPull) if err != nil { return pull, headRepo, errors.Wrap(err, "extracting required fields from comment data") } return pull, headRepo, nil } -func (c *DefaultCommandRunner) getGitlabData(baseRepo models.Repo, pullNum int) (models.PullRequest, error) { +func (c *DefaultCommandRunner) getGiteaData(logger logging.SimpleLogging, baseRepo models.Repo, pullNum int) (models.PullRequest, models.Repo, error) { + if c.GiteaPullGetter == nil { + return models.PullRequest{}, models.Repo{}, errors.New("Atlantis not configured to support Gitea") + } + giteaPull, err := c.GiteaPullGetter.GetPullRequest(logger, baseRepo, pullNum) + if err != nil { + return models.PullRequest{}, models.Repo{}, errors.Wrap(err, "making pull request API call to Gitea") + } + pull, _, headRepo, err := c.EventParser.ParseGiteaPull(giteaPull) + if err != nil { + return pull, headRepo, errors.Wrap(err, "extracting required fields from comment data") + } + return pull, headRepo, nil +} + +func (c *DefaultCommandRunner) getGitlabData(logger logging.SimpleLogging, baseRepo models.Repo, pullNum int) (models.PullRequest, error) { if c.GitlabMergeRequestGetter == nil { return models.PullRequest{}, errors.New("Atlantis not configured to support GitLab") } - mr, err := c.GitlabMergeRequestGetter.GetMergeRequest(baseRepo.FullName, pullNum) + mr, err := c.GitlabMergeRequestGetter.GetMergeRequest(logger, baseRepo.FullName, pullNum) if err != nil { return models.PullRequest{}, errors.Wrap(err, "making merge request API call to GitLab") } @@ -398,11 +437,11 @@ func (c *DefaultCommandRunner) getGitlabData(baseRepo models.Repo, pullNum int) return pull, nil } -func (c *DefaultCommandRunner) getAzureDevopsData(baseRepo models.Repo, pullNum int) (models.PullRequest, models.Repo, error) { +func (c *DefaultCommandRunner) getAzureDevopsData(logger logging.SimpleLogging, baseRepo models.Repo, pullNum int) 
(models.PullRequest, models.Repo, error) { if c.AzureDevopsPullGetter == nil { return models.PullRequest{}, models.Repo{}, errors.New("atlantis not configured to support Azure DevOps") } - adPull, err := c.AzureDevopsPullGetter.GetPullRequest(baseRepo, pullNum) + adPull, err := c.AzureDevopsPullGetter.GetPullRequest(logger, baseRepo, pullNum) if err != nil { return models.PullRequest{}, models.Repo{}, errors.Wrap(err, "making pull request API call to Azure DevOps") } @@ -435,9 +474,9 @@ func (c *DefaultCommandRunner) ensureValidRepoMetadata( switch baseRepo.VCSHost.Type { case models.Github: - pull, headRepo, err = c.getGithubData(baseRepo, pullNum) + pull, headRepo, err = c.getGithubData(log, baseRepo, pullNum) case models.Gitlab: - pull, err = c.getGitlabData(baseRepo, pullNum) + pull, err = c.getGitlabData(log, baseRepo, pullNum) case models.BitbucketCloud, models.BitbucketServer: if maybePull == nil { err = errors.New("pull request should not be nil–this is a bug") @@ -445,14 +484,16 @@ func (c *DefaultCommandRunner) ensureValidRepoMetadata( } pull = *maybePull case models.AzureDevops: - pull, headRepo, err = c.getAzureDevopsData(baseRepo, pullNum) + pull, headRepo, err = c.getAzureDevopsData(log, baseRepo, pullNum) + case models.Gitea: + pull, headRepo, err = c.getGiteaData(log, baseRepo, pullNum) default: err = errors.New("Unknown VCS type–this is a bug") } if err != nil { log.Err(err.Error()) - if commentErr := c.VCSClient.CreateComment(baseRepo, pullNum, fmt.Sprintf("`Error: %s`", err), ""); commentErr != nil { + if commentErr := c.VCSClient.CreateComment(c.Logger, baseRepo, pullNum, fmt.Sprintf("`Error: %s`", err), ""); commentErr != nil { log.Err("unable to comment: %s", commentErr) } } @@ -460,13 +501,23 @@ func (c *DefaultCommandRunner) ensureValidRepoMetadata( return } +func (c *DefaultCommandRunner) fetchUserTeams(repo models.Repo, user *models.User) error { + teams, err := c.VCSClient.GetTeamNamesForUser(repo, *user) + if err != nil { + return err + } + + user.Teams = teams + return nil +} + func (c *DefaultCommandRunner) validateCtxAndComment(ctx *command.Context, commandName command.Name) bool { if !c.AllowForkPRs && ctx.HeadRepo.Owner != ctx.Pull.BaseRepo.Owner { if c.SilenceForkPRErrors { return false } ctx.Log.Info("command was run on a fork pull request which is disallowed") - if err := c.VCSClient.CreateComment(ctx.Pull.BaseRepo, ctx.Pull.Num, fmt.Sprintf("Atlantis commands can't be run on fork pull requests. To enable, set --%s or, to disable this message, set --%s", c.AllowForkPRsFlag, c.SilenceForkPRErrorsFlag), ""); err != nil { + if err := c.VCSClient.CreateComment(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull.Num, fmt.Sprintf("Atlantis commands can't be run on fork pull requests. 
To enable, set --%s or, to disable this message, set --%s", c.AllowForkPRsFlag, c.SilenceForkPRErrorsFlag), ""); err != nil { ctx.Log.Err("unable to comment: %s", err) } return false @@ -474,7 +525,7 @@ func (c *DefaultCommandRunner) validateCtxAndComment(ctx *command.Context, comma if ctx.Pull.State != models.OpenPullState && commandName != command.Unlock { ctx.Log.Info("command was run on closed pull request") - if err := c.VCSClient.CreateComment(ctx.Pull.BaseRepo, ctx.Pull.Num, "Atlantis commands can't be run on closed pull requests", ""); err != nil { + if err := c.VCSClient.CreateComment(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull.Num, "Atlantis commands can't be run on closed pull requests", ""); err != nil { ctx.Log.Err("unable to comment: %s", err) } return false @@ -495,6 +546,7 @@ func (c *DefaultCommandRunner) logPanics(baseRepo models.Repo, pullNum int, logg stack := recovery.Stack(3) logger.Err("PANIC: %s\n%s", err, stack) if commentErr := c.VCSClient.CreateComment( + logger, baseRepo, pullNum, fmt.Sprintf("**Error: goroutine panic. This is a bug.**\n```\n%s\n%s```", err, stack), diff --git a/server/events/command_runner_internal_test.go b/server/events/command_runner_internal_test.go index 1241fecc55..02a54c3870 100644 --- a/server/events/command_runner_internal_test.go +++ b/server/events/command_runner_internal_test.go @@ -5,6 +5,7 @@ import ( "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" . "github.com/runatlantis/atlantis/testing" ) @@ -273,7 +274,7 @@ type MockCSU struct { Called bool } -func (m *MockCSU) UpdateCombinedCount(repo models.Repo, pull models.PullRequest, status models.CommitStatus, command command.Name, numSuccess int, numTotal int) error { +func (m *MockCSU) UpdateCombinedCount(_ logging.SimpleLogging, repo models.Repo, pull models.PullRequest, status models.CommitStatus, command command.Name, numSuccess int, numTotal int) error { m.Called = true m.CalledRepo = repo m.CalledPull = pull @@ -284,7 +285,7 @@ func (m *MockCSU) UpdateCombinedCount(repo models.Repo, pull models.PullRequest, return nil } -func (m *MockCSU) UpdateCombined(_ models.Repo, _ models.PullRequest, _ models.CommitStatus, _ command.Name) error { +func (m *MockCSU) UpdateCombined(_ logging.SimpleLogging, _ models.Repo, _ models.PullRequest, _ models.CommitStatus, _ command.Name) error { return nil } @@ -292,10 +293,10 @@ func (m *MockCSU) UpdateProject(_ command.ProjectContext, _ command.Name, _ mode return nil } -func (m *MockCSU) UpdatePreWorkflowHook(_ models.PullRequest, _ models.CommitStatus, _ string, _ string, _ string) error { +func (m *MockCSU) UpdatePreWorkflowHook(_ logging.SimpleLogging, _ models.PullRequest, _ models.CommitStatus, _ string, _ string, _ string) error { return nil } -func (m *MockCSU) UpdatePostWorkflowHook(_ models.PullRequest, _ models.CommitStatus, _ string, _ string, _ string) error { +func (m *MockCSU) UpdatePostWorkflowHook(_ logging.SimpleLogging, _ models.PullRequest, _ models.CommitStatus, _ string, _ string, _ string) error { return nil } diff --git a/server/events/command_runner_test.go b/server/events/command_runner_test.go index d9c8451570..fa2dee091c 100644 --- a/server/events/command_runner_test.go +++ b/server/events/command_runner_test.go @@ -27,7 +27,7 @@ import ( "github.com/runatlantis/atlantis/server/logging" "github.com/runatlantis/atlantis/server/metrics" - "github.com/google/go-github/v58/github" + 
"github.com/google/go-github/v65/github" . "github.com/petergtz/pegomock/v4" lockingmocks "github.com/runatlantis/atlantis/server/core/locking/mocks" "github.com/runatlantis/atlantis/server/events" @@ -261,37 +261,42 @@ func setup(t *testing.T, options ...func(testConfig *TestConfig)) *vcsmocks.Mock func TestRunCommentCommand_LogPanics(t *testing.T) { t.Log("if there is a panic it is commented back on the pull request") vcsClient := setup(t) - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenPanic("panic test - if you're seeing this in a test failure this isn't the failing test") + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenPanic( + "panic test - if you're seeing this in a test failure this isn't the failing test") ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, 1, &events.CommentCommand{Name: command.Plan}) - _, _, comment, _ := vcsClient.VerifyWasCalledOnce().CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()).GetCapturedArguments() + _, _, _, comment, _ := vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()).GetCapturedArguments() Assert(t, strings.Contains(comment, "Error: goroutine panic"), fmt.Sprintf("comment should be about a goroutine panic but was %q", comment)) } func TestRunCommentCommand_GithubPullErr(t *testing.T) { t.Log("if getting the github pull request fails an error should be logged") vcsClient := setup(t) - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(nil, errors.New("err")) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(nil, errors.New("err")) ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Plan}) - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, testdata.Pull.Num, "`Error: making pull request API call to GitHub: err`", "") + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num), Eq("`Error: making pull request API call to GitHub: err`"), Eq("")) } func TestRunCommentCommand_GitlabMergeRequestErr(t *testing.T) { t.Log("if getting the gitlab merge request fails an error should be logged") vcsClient := setup(t) - When(gitlabGetter.GetMergeRequest(testdata.GitlabRepo.FullName, testdata.Pull.Num)).ThenReturn(nil, errors.New("err")) + When(gitlabGetter.GetMergeRequest(Any[logging.SimpleLogging](), Eq(testdata.GitlabRepo.FullName), Eq(testdata.Pull.Num))).ThenReturn(nil, errors.New("err")) ch.RunCommentCommand(testdata.GitlabRepo, &testdata.GitlabRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Plan}) - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GitlabRepo, testdata.Pull.Num, "`Error: making merge request API call to GitLab: err`", "") + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(testdata.GitlabRepo), Eq(testdata.Pull.Num), Eq("`Error: making merge request API call to GitLab: err`"), Eq("")) } func TestRunCommentCommand_GithubPullParseErr(t *testing.T) { t.Log("if parsing the returned github pull request fails an error should be logged") vcsClient := setup(t) var pull github.PullRequest - When(githubGetter.GetPullRequest(testdata.GithubRepo, 
testdata.Pull.Num)).ThenReturn(&pull, nil) - When(eventParsing.ParseGithubPull(&pull)).ThenReturn(testdata.Pull, testdata.GithubRepo, testdata.GitlabRepo, errors.New("err")) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(&pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(&pull))).ThenReturn(testdata.Pull, testdata.GithubRepo, testdata.GitlabRepo, errors.New("err")) ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Plan}) - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, testdata.Pull.Num, "`Error: extracting required fields from comment data: err`", "") + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num), Eq("`Error: extracting required fields from comment data: err`"), Eq("")) } func TestRunCommentCommand_TeamAllowListChecker(t *testing.T) { @@ -304,29 +309,31 @@ func TestRunCommentCommand_TeamAllowListChecker(t *testing.T) { BaseRepo: testdata.GithubRepo, State: models.OpenPullState, } - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(&pull, nil) - When(eventParsing.ParseGithubPull(&pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(&pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(&pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) ch.RunCommentCommand(testdata.GithubRepo, nil, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Plan}) vcsClient.VerifyWasCalled(Never()).GetTeamNamesForUser(testdata.GithubRepo, testdata.User) - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, modelPull.Num, "Ran Plan for 0 projects:", "plan") + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull.Num), Eq("Ran Plan for 0 projects:"), Eq("plan")) }) t.Run("no rules", func(t *testing.T) { vcsClient := setup(t) // by default these are false so don't need to reset - ch.TeamAllowlistChecker = &events.TeamAllowlistChecker{} + ch.TeamAllowlistChecker = &command.DefaultTeamAllowlistChecker{} var pull github.PullRequest modelPull := models.PullRequest{ BaseRepo: testdata.GithubRepo, State: models.OpenPullState, } - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(&pull, nil) - When(eventParsing.ParseGithubPull(&pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(&pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(&pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) ch.RunCommentCommand(testdata.GithubRepo, nil, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Plan}) vcsClient.VerifyWasCalled(Never()).GetTeamNamesForUser(testdata.GithubRepo, testdata.User) - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, modelPull.Num, "Ran Plan for 0 projects:", "plan") + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull.Num), Eq("Ran Plan for 0 
projects:"), Eq("plan")) }) } @@ -342,16 +349,17 @@ func TestRunCommentCommand_ForkPRDisabled(t *testing.T) { BaseRepo: testdata.GithubRepo, State: models.OpenPullState, } - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(&pull, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(&pull, nil) headRepo := testdata.GithubRepo headRepo.FullName = "forkrepo/atlantis" headRepo.Owner = "forkrepo" - When(eventParsing.ParseGithubPull(&pull)).ThenReturn(modelPull, modelPull.BaseRepo, headRepo, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(&pull))).ThenReturn(modelPull, modelPull.BaseRepo, headRepo, nil) ch.RunCommentCommand(testdata.GithubRepo, nil, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Plan}) commentMessage := fmt.Sprintf("Atlantis commands can't be run on fork pull requests. To enable, set --%s or, to disable this message, set --%s", ch.AllowForkPRsFlag, ch.SilenceForkPRErrorsFlag) - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, modelPull.Num, commentMessage, "") + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull.Num), Eq(commentMessage), Eq("")) } func TestRunCommentCommand_ForkPRDisabled_SilenceEnabled(t *testing.T) { @@ -361,15 +369,16 @@ func TestRunCommentCommand_ForkPRDisabled_SilenceEnabled(t *testing.T) { ch.SilenceForkPRErrors = true var pull github.PullRequest modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(&pull, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(&pull, nil) headRepo := testdata.GithubRepo headRepo.FullName = "forkrepo/atlantis" headRepo.Owner = "forkrepo" - When(eventParsing.ParseGithubPull(&pull)).ThenReturn(modelPull, modelPull.BaseRepo, headRepo, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(&pull))).ThenReturn(modelPull, modelPull.BaseRepo, headRepo, nil) ch.RunCommentCommand(testdata.GithubRepo, nil, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Plan}) - vcsClient.VerifyWasCalled(Never()).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()) + vcsClient.VerifyWasCalled(Never()).CreateComment( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) } func TestRunCommentCommandPlan_NoProjects_SilenceEnabled(t *testing.T) { @@ -378,12 +387,14 @@ func TestRunCommentCommandPlan_NoProjects_SilenceEnabled(t *testing.T) { planCommandRunner.SilenceNoProjects = true var pull github.PullRequest modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(&pull, nil) - When(eventParsing.ParseGithubPull(&pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(&pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(&pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) ch.RunCommentCommand(testdata.GithubRepo, nil, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: 
command.Plan}) - vcsClient.VerifyWasCalled(Never()).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()) + vcsClient.VerifyWasCalled(Never()).CreateComment( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) commitUpdater.VerifyWasCalledOnce().UpdateCombinedCount( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Eq[models.CommitStatus](models.SuccessCommitStatus), @@ -400,12 +411,14 @@ func TestRunCommentCommandPlan_NoProjectsTarget_SilenceEnabled(t *testing.T) { planCommandRunner.SilenceNoProjects = true var pull github.PullRequest modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(&pull, nil) - When(eventParsing.ParseGithubPull(&pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(&pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(&pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) ch.RunCommentCommand(testdata.GithubRepo, nil, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Plan, ProjectName: "meow"}) - vcsClient.VerifyWasCalled(Never()).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()) + vcsClient.VerifyWasCalled(Never()).CreateComment( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) commitUpdater.VerifyWasCalledOnce().UpdateCombinedCount( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Eq[models.CommitStatus](models.SuccessCommitStatus), @@ -421,12 +434,14 @@ func TestRunCommentCommandApply_NoProjects_SilenceEnabled(t *testing.T) { applyCommandRunner.SilenceNoProjects = true var pull github.PullRequest modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(&pull, nil) - When(eventParsing.ParseGithubPull(&pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(&pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(&pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) ch.RunCommentCommand(testdata.GithubRepo, nil, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Apply}) - vcsClient.VerifyWasCalled(Never()).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()) + vcsClient.VerifyWasCalled(Never()).CreateComment( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) commitUpdater.VerifyWasCalledOnce().UpdateCombinedCount( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Eq[models.CommitStatus](models.SuccessCommitStatus), @@ -442,12 +457,14 @@ func TestRunCommentCommandApprovePolicy_NoProjects_SilenceEnabled(t *testing.T) approvePoliciesCommandRunner.SilenceNoProjects = true var pull github.PullRequest modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(&pull, nil) - 
When(eventParsing.ParseGithubPull(&pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(&pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(&pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) ch.RunCommentCommand(testdata.GithubRepo, nil, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.ApprovePolicies}) - vcsClient.VerifyWasCalled(Never()).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()) + vcsClient.VerifyWasCalled(Never()).CreateComment( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) commitUpdater.VerifyWasCalledOnce().UpdateCombinedCount( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Eq[models.CommitStatus](models.SuccessCommitStatus), @@ -463,11 +480,11 @@ func TestRunCommentCommandUnlock_NoProjects_SilenceEnabled(t *testing.T) { unlockCommandRunner.SilenceNoProjects = true var pull github.PullRequest modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(&pull, nil) - When(eventParsing.ParseGithubPull(&pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(&pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(&pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) ch.RunCommentCommand(testdata.GithubRepo, nil, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Unlock}) - vcsClient.VerifyWasCalled(Never()).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()) + vcsClient.VerifyWasCalled(Never()).CreateComment(Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) } func TestRunCommentCommandImport_NoProjects_SilenceEnabled(t *testing.T) { @@ -476,11 +493,11 @@ func TestRunCommentCommandImport_NoProjects_SilenceEnabled(t *testing.T) { importCommandRunner.SilenceNoProjects = true var pull github.PullRequest modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(&pull, nil) - When(eventParsing.ParseGithubPull(&pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(&pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(&pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) ch.RunCommentCommand(testdata.GithubRepo, nil, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Import}) - vcsClient.VerifyWasCalled(Never()).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()) + vcsClient.VerifyWasCalled(Never()).CreateComment(Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) } func TestRunCommentCommand_DisableApplyAllDisabled(t *testing.T) { @@ -492,11 +509,13 @@ func TestRunCommentCommand_DisableApplyAllDisabled(t *testing.T) { State: github.String("open"), } modelPull := 
models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) ch.RunCommentCommand(testdata.GithubRepo, nil, nil, testdata.User, modelPull.Num, &events.CommentCommand{Name: command.Apply}) - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, modelPull.Num, "**Error:** Running `atlantis apply` without flags is disabled. You must specify which project to apply via the `-d `, `-w ` or `-p ` flags.", "apply") + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull.Num), + Eq("**Error:** Running `atlantis apply` without flags is disabled. You must specify which project to apply via the `-d `, `-w ` or `-p ` flags."), Eq("apply")) } func TestRunCommentCommand_DisableAutoplan(t *testing.T) { @@ -538,11 +557,12 @@ func TestRunCommentCommand_DisableAutoplanLabel(t *testing.T) { CommandName: command.Plan, }, }, nil) - When(ch.VCSClient.GetPullLabels(testdata.GithubRepo, modelPull)).ThenReturn([]string{"disable-auto-plan", "need-help"}, nil) + When(ch.VCSClient.GetPullLabels( + Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull))).ThenReturn([]string{"disable-auto-plan", "need-help"}, nil) ch.RunAutoplanCommand(testdata.GithubRepo, testdata.GithubRepo, modelPull, testdata.User) projectCommandBuilder.VerifyWasCalled(Never()).BuildAutoplanCommands(Any[*command.Context]()) - vcsClient.VerifyWasCalledOnce().GetPullLabels(testdata.GithubRepo, modelPull) + vcsClient.VerifyWasCalledOnce().GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull)) } func TestRunCommentCommand_DisableAutoplanLabel_PullNotLabeled(t *testing.T) { @@ -562,11 +582,11 @@ func TestRunCommentCommand_DisableAutoplanLabel_PullNotLabeled(t *testing.T) { CommandName: command.Plan, }, }, nil) - When(ch.VCSClient.GetPullLabels(testdata.GithubRepo, modelPull)).ThenReturn(nil, nil) + When(ch.VCSClient.GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull))).ThenReturn(nil, nil) ch.RunAutoplanCommand(testdata.GithubRepo, testdata.GithubRepo, modelPull, testdata.User) projectCommandBuilder.VerifyWasCalled(Once()).BuildAutoplanCommands(Any[*command.Context]()) - vcsClient.VerifyWasCalledOnce().GetPullLabels(testdata.GithubRepo, modelPull) + vcsClient.VerifyWasCalledOnce().GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull)) } func TestRunCommentCommand_ClosedPull(t *testing.T) { @@ -577,11 +597,12 @@ func TestRunCommentCommand_ClosedPull(t *testing.T) { State: github.String("closed"), } modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.ClosedPullState, Num: testdata.Pull.Num} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + 
When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Plan}) - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, modelPull.Num, "Atlantis commands can't be run on closed pull requests", "") + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull.Num), Eq("Atlantis commands can't be run on closed pull requests"), Eq("")) } func TestRunCommentCommand_MatchedBranch(t *testing.T) { @@ -594,11 +615,12 @@ func TestRunCommentCommand_MatchedBranch(t *testing.T) { }) var pull github.PullRequest modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, BaseBranch: "main"} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(&pull, nil) - When(eventParsing.ParseGithubPull(&pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(&pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(&pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) ch.RunCommentCommand(testdata.GithubRepo, nil, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Plan}) - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, modelPull.Num, "Ran Plan for 0 projects:", "plan") + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull.Num), Eq("Ran Plan for 0 projects:"), Eq("plan")) } func TestRunCommentCommand_UnmatchedBranch(t *testing.T) { @@ -611,11 +633,11 @@ func TestRunCommentCommand_UnmatchedBranch(t *testing.T) { }) var pull github.PullRequest modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, BaseBranch: "foo"} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(&pull, nil) - When(eventParsing.ParseGithubPull(&pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(&pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(&pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) ch.RunCommentCommand(testdata.GithubRepo, nil, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Plan}) - vcsClient.VerifyWasCalled(Never()).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()) + vcsClient.VerifyWasCalled(Never()).CreateComment(Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) } func TestRunUnlockCommand_VCSComment(t *testing.T) { @@ -644,13 +666,19 @@ func TestRunUnlockCommand_VCSComment(t *testing.T) { State: tc.prState, } modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) - - ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: 
command.Unlock}) - - deleteLockCommand.VerifyWasCalledOnce().DeleteLocksByPull(testdata.GithubRepo.FullName, testdata.Pull.Num) - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, testdata.Pull.Num, "All Atlantis locks for this PR have been unlocked and plans discarded", "unlock") + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, + testdata.GithubRepo, nil) + + ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, + &events.CommentCommand{Name: command.Unlock}) + + deleteLockCommand.VerifyWasCalledOnce().DeleteLocksByPull(Any[logging.SimpleLogging](), + Eq(testdata.GithubRepo.FullName), Eq(testdata.Pull.Num)) + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num), + Eq("All Atlantis locks for this PR have been unlocked and plans discarded"), Eq("unlock")) }) } } @@ -664,13 +692,18 @@ func TestRunUnlockCommandFail_VCSComment(t *testing.T) { State: github.String("open"), } modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) - When(deleteLockCommand.DeleteLocksByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)).ThenReturn(0, errors.New("err")) - - ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Unlock}) - - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, testdata.Pull.Num, "Failed to delete PR locks", "unlock") + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, + testdata.GithubRepo, nil) + When(deleteLockCommand.DeleteLocksByPull(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo.FullName), + Eq(testdata.Pull.Num))).ThenReturn(0, errors.New("err")) + + ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, + &events.CommentCommand{Name: command.Unlock}) + + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num), Eq("Failed to delete PR locks"), Eq("unlock")) } func TestRunUnlockCommandFail_DisableUnlockLabel(t *testing.T) { @@ -683,14 +716,20 @@ func TestRunUnlockCommandFail_DisableUnlockLabel(t *testing.T) { State: github.String("open"), } modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) - When(deleteLockCommand.DeleteLocksByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)).ThenReturn(0, errors.New("err")) - When(ch.VCSClient.GetPullLabels(testdata.GithubRepo, modelPull)).ThenReturn([]string{doNotUnlock, "need-help"}, nil) - - ch.RunCommentCommand(testdata.GithubRepo, 
&testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Unlock}) - - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, testdata.Pull.Num, "Not allowed to unlock PR with "+doNotUnlock+" label", "unlock") + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, + testdata.GithubRepo, nil) + When(deleteLockCommand.DeleteLocksByPull(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo.FullName), + Eq(testdata.Pull.Num))).ThenReturn(0, errors.New("err")) + When(ch.VCSClient.GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(modelPull))).ThenReturn([]string{doNotUnlock, "need-help"}, nil) + + ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, + &events.CommentCommand{Name: command.Unlock}) + + vcsClient.VerifyWasCalledOnce().CreateComment(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(testdata.Pull.Num), Eq("Not allowed to unlock PR with "+doNotUnlock+" label"), Eq("unlock")) } func TestRunUnlockCommandFail_GetLabelsFail(t *testing.T) { @@ -701,14 +740,20 @@ func TestRunUnlockCommandFail_GetLabelsFail(t *testing.T) { State: github.String("open"), } modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) - When(deleteLockCommand.DeleteLocksByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)).ThenReturn(0, errors.New("err")) - When(ch.VCSClient.GetPullLabels(testdata.GithubRepo, modelPull)).ThenReturn(nil, errors.New("err")) - - ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Unlock}) - - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, testdata.Pull.Num, "Failed to retrieve PR labels... Not unlocking", "unlock") + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, + testdata.GithubRepo, nil) + When(deleteLockCommand.DeleteLocksByPull(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo.FullName), + Eq(testdata.Pull.Num))).ThenReturn(0, errors.New("err")) + When(ch.VCSClient.GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(modelPull))).ThenReturn(nil, errors.New("err")) + + ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, + &events.CommentCommand{Name: command.Unlock}) + + vcsClient.VerifyWasCalledOnce().CreateComment(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num), + Eq("Failed to retrieve PR labels... 
Not unlocking"), Eq("unlock")) } func TestRunUnlockCommandDoesntRetrieveLabelsIfDisableUnlockLabelNotSet(t *testing.T) { @@ -721,15 +766,20 @@ func TestRunUnlockCommandDoesntRetrieveLabelsIfDisableUnlockLabelNotSet(t *testi State: github.String("open"), } modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) - When(deleteLockCommand.DeleteLocksByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)).ThenReturn(0, errors.New("err")) - When(ch.VCSClient.GetPullLabels(testdata.GithubRepo, modelPull)).ThenReturn([]string{doNotUnlock, "need-help"}, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, + testdata.GithubRepo, nil) + When(deleteLockCommand.DeleteLocksByPull(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo.FullName), + Eq(testdata.Pull.Num))).ThenReturn(0, errors.New("err")) + When(ch.VCSClient.GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(modelPull))).ThenReturn([]string{doNotUnlock, "need-help"}, nil) unlockCommandRunner.DisableUnlockLabel = "" - ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Unlock}) + ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, + &events.CommentCommand{Name: command.Unlock}) - vcsClient.VerifyWasCalled(Never()).GetPullLabels(testdata.GithubRepo, modelPull) + vcsClient.VerifyWasCalled(Never()).GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull)) } func TestRunAutoplanCommand_DeletePlans(t *testing.T) { @@ -817,8 +867,8 @@ func TestRunCommentCommand_FailedPreWorkflowHook_FailOnPreWorkflowHookError_Fals When(workingDir.GetPullDir(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(tmp, nil) pull := &github.PullRequest{State: github.String("open")} modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) When(preWorkflowHooksCommandRunner.RunPreHooks(Any[*command.Context](), Any[*events.CommentCommand]())).ThenReturn(errors.New("err")) testdata.Pull.BaseRepo = testdata.GithubRepo ch.FailOnPreWorkflowHookError = false @@ -859,8 +909,8 @@ func TestRunGenericPlanCommand_DeletePlans(t *testing.T) { When(workingDir.GetPullDir(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(tmp, nil) pull := &github.PullRequest{State: github.String("open")} modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} - When(githubGetter.GetPullRequest(testdata.GithubRepo, 
testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) testdata.Pull.BaseRepo = testdata.GithubRepo ch.RunCommentCommand(testdata.GithubRepo, nil, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Plan}) pendingPlanFinder.VerifyWasCalledOnce().DeletePlans(tmp) @@ -953,8 +1003,8 @@ func TestRunGenericPlanCommand_DiscardApprovals(t *testing.T) { When(workingDir.GetPullDir(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(tmp, nil) pull := &github.PullRequest{State: github.String("open")} modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) testdata.Pull.BaseRepo = testdata.GithubRepo ch.RunCommentCommand(testdata.GithubRepo, nil, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Plan}) pendingPlanFinder.VerifyWasCalledOnce().DeletePlans(tmp) @@ -983,8 +1033,8 @@ func TestFailedApprovalCreatesFailedStatusUpdate(t *testing.T) { State: models.OpenPullState, Num: testdata.Pull.Num, } - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) When(projectCommandBuilder.BuildApprovePoliciesCommands(Any[*command.Context](), Any[*events.CommentCommand]())).ThenReturn([]command.ProjectContext{ { @@ -999,6 +1049,7 @@ func TestFailedApprovalCreatesFailedStatusUpdate(t *testing.T) { ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, &testdata.Pull, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.ApprovePolicies}) commitUpdater.VerifyWasCalledOnce().UpdateCombinedCount( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Eq[models.CommitStatus](models.SuccessCommitStatus), @@ -1028,8 +1079,8 @@ func TestApprovedPoliciesUpdateFailedPolicyStatus(t *testing.T) { State: models.OpenPullState, Num: testdata.Pull.Num, } - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), 
Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) When(projectCommandBuilder.BuildApprovePoliciesCommands(Any[*command.Context](), Any[*events.CommentCommand]())).ThenReturn([]command.ProjectContext{ { @@ -1054,6 +1105,7 @@ func TestApprovedPoliciesUpdateFailedPolicyStatus(t *testing.T) { ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, &testdata.Pull, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.ApprovePolicies}) commitUpdater.VerifyWasCalledOnce().UpdateCombinedCount( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Eq[models.CommitStatus](models.SuccessCommitStatus), @@ -1083,8 +1135,8 @@ func TestApplyMergeablityWhenPolicyCheckFails(t *testing.T) { State: models.OpenPullState, Num: testdata.Pull.Num, } - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) _, _ = boltDB.UpdatePullWithResults(modelPull, []command.ProjectResult{ { @@ -1096,7 +1148,7 @@ func TestApplyMergeablityWhenPolicyCheckFails(t *testing.T) { }, }) - When(ch.VCSClient.PullIsMergeable(testdata.GithubRepo, modelPull, "atlantis-test")).ThenReturn(true, nil) + When(ch.VCSClient.PullIsMergeable(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull), Eq("atlantis-test"), Eq([]string{}))).ThenReturn(true, nil) When(projectCommandBuilder.BuildApplyCommands(Any[*command.Context](), Any[*events.CommentCommand]())).Then(func(args []Param) ReturnValues { return ReturnValues{ @@ -1125,8 +1177,8 @@ func TestApplyWithAutoMerge_VSCMerge(t *testing.T) { State: github.String("open"), } modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState} - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) autoMerger.GlobalAutomerge = true defer func() { autoMerger.GlobalAutomerge = false }() @@ -1135,7 +1187,7 @@ func TestApplyWithAutoMerge_VSCMerge(t *testing.T) { } ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Apply}) - vcsClient.VerifyWasCalledOnce().MergePull(modelPull, pullOptions) + vcsClient.VerifyWasCalledOnce().MergePull(Any[logging.SimpleLogging](), Eq(modelPull), Eq(pullOptions)) } func TestRunApply_DiscardedProjects(t *testing.T) { @@ -1167,13 +1219,13 @@ func TestRunApply_DiscardedProjects(t *testing.T) { ghPull := &github.PullRequest{ State: github.String("open"), } - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(ghPull, nil) - When(eventParsing.ParseGithubPull(ghPull)).ThenReturn(pull, pull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), 
Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(ghPull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(ghPull))).ThenReturn(pull, pull.BaseRepo, testdata.GithubRepo, nil) When(workingDir.GetPullDir(Any[models.Repo](), Any[models.PullRequest]())). ThenReturn(tmp, nil) ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, &pull, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Apply}) - vcsClient.VerifyWasCalled(Never()).MergePull(Any[models.PullRequest](), Any[models.PullRequestOptions]()) + vcsClient.VerifyWasCalled(Never()).MergePull(Any[logging.SimpleLogging](), Any[models.PullRequest](), Any[models.PullRequestOptions]()) } func TestRunCommentCommand_DrainOngoing(t *testing.T) { @@ -1181,15 +1233,17 @@ func TestRunCommentCommand_DrainOngoing(t *testing.T) { vcsClient := setup(t) drainer.ShutdownBlocking() ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, nil) - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, testdata.Pull.Num, "Atlantis server is shutting down, please try again later.", "") + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num), Eq("Atlantis server is shutting down, please try again later."), Eq("")) } func TestRunCommentCommand_DrainNotOngoing(t *testing.T) { t.Log("if drain is not ongoing then remove ongoing operation must be called even if panic occurred") setup(t) - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenPanic("panic test - if you're seeing this in a test failure this isn't the failing test") + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenPanic( + "panic test - if you're seeing this in a test failure this isn't the failing test") ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Plan}) - githubGetter.VerifyWasCalledOnce().GetPullRequest(testdata.GithubRepo, testdata.Pull.Num) + githubGetter.VerifyWasCalledOnce().GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num)) Equals(t, 0, drainer.GetStatus().InProgressOps) } @@ -1198,7 +1252,8 @@ func TestRunAutoplanCommand_DrainOngoing(t *testing.T) { vcsClient := setup(t) drainer.ShutdownBlocking() ch.RunAutoplanCommand(testdata.GithubRepo, testdata.GithubRepo, testdata.Pull, testdata.User) - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, testdata.Pull.Num, "Atlantis server is shutting down, please try again later.", "plan") + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num), Eq("Atlantis server is shutting down, please try again later."), Eq("plan")) } func TestRunAutoplanCommand_DrainNotOngoing(t *testing.T) { diff --git a/server/events/comment_parser.go b/server/events/comment_parser.go index c4ec87bb6d..829c15ced9 100644 --- a/server/events/comment_parser.go +++ b/server/events/comment_parser.go @@ -41,6 +41,8 @@ const ( policySetFlagShort = "" autoMergeDisabledFlagLong = "auto-merge-disabled" autoMergeDisabledFlagShort = "" + autoMergeMethodFlagLong = "auto-merge-method" + autoMergeMethodFlagShort = "" verboseFlagLong = "verbose" verboseFlagShort = "" clearPolicyApprovalFlagLong = "clear-policy-approval" @@ -70,7 +72,7 @@ type CommentBuilder interface { // BuildPlanComment builds 
a plan comment for the specified args. BuildPlanComment(repoRelDir string, workspace string, project string, commentArgs []string) string // BuildApplyComment builds an apply comment for the specified args. - BuildApplyComment(repoRelDir string, workspace string, project string, autoMergeDisabled bool) string + BuildApplyComment(repoRelDir string, workspace string, project string, autoMergeDisabled bool, autoMergeMethod string) string // BuildApprovePoliciesComment builds an approve_policies comment for the specified args. BuildApprovePoliciesComment(repoRelDir string, workspace string, project string) string } @@ -79,6 +81,7 @@ type CommentBuilder interface { type CommentParser struct { GithubUser string GitlabUser string + GiteaUser string BitbucketUser string AzureDevopsUser string ExecutableName string @@ -86,7 +89,7 @@ type CommentParser struct { } // NewCommentParser returns a CommentParser -func NewCommentParser(githubUser, gitlabUser, bitbucketUser, azureDevopsUser, executableName string, allowCommands []command.Name) *CommentParser { +func NewCommentParser(githubUser, gitlabUser, giteaUser, bitbucketUser, azureDevopsUser, executableName string, allowCommands []command.Name) *CommentParser { var commentAllowCommands []command.Name for _, acceptableCommand := range command.AllCommentCommands { for _, allowCommand := range allowCommands { @@ -100,6 +103,7 @@ func NewCommentParser(githubUser, gitlabUser, bitbucketUser, azureDevopsUser, ex return &CommentParser{ GithubUser: githubUser, GitlabUser: gitlabUser, + GiteaUser: giteaUser, BitbucketUser: bitbucketUser, AzureDevopsUser: azureDevopsUser, ExecutableName: executableName, @@ -174,6 +178,8 @@ func (e *CommentParser) Parse(rawComment string, vcsHost models.VCSHostType) Com vcsUser = e.GithubUser case models.Gitlab: vcsUser = e.GitlabUser + case models.Gitea: + vcsUser = e.GiteaUser case models.BitbucketCloud, models.BitbucketServer: vcsUser = e.BitbucketUser case models.AzureDevops: @@ -222,7 +228,9 @@ func (e *CommentParser) Parse(rawComment string, vcsHost models.VCSHostType) Com var project string var policySet string var clearPolicyApproval bool - var verbose, autoMergeDisabled bool + var verbose bool + var autoMergeDisabled bool + var autoMergeMethod string var flagSet *pflag.FlagSet var name command.Name @@ -244,6 +252,7 @@ func (e *CommentParser) Parse(rawComment string, vcsHost models.VCSHostType) Com flagSet.StringVarP(&dir, dirFlagLong, dirFlagShort, "", "Apply the plan for this directory, relative to root of repo, ex. 'child/dir'.") flagSet.StringVarP(&project, projectFlagLong, projectFlagShort, "", "Apply the plan for this project. Refers to the name of the project configured in a repo config file. Cannot be used at same time as workspace or dir flags.") flagSet.BoolVarP(&autoMergeDisabled, autoMergeDisabledFlagLong, autoMergeDisabledFlagShort, false, "Disable automerge after apply.") + flagSet.StringVarP(&autoMergeMethod, autoMergeMethodFlagLong, autoMergeMethodFlagShort, "", "Specifies the merge method for the VCS if automerge is enabled. 
(Currently only implemented for GitHub)") flagSet.BoolVarP(&verbose, verboseFlagLong, verboseFlagShort, false, "Append Atlantis log to comment.") case command.ApprovePolicies.String(): name = command.ApprovePolicies @@ -313,8 +322,20 @@ func (e *CommentParser) Parse(rawComment string, vcsHost models.VCSHostType) Com return CommentParseResult{CommentResponse: e.errMarkdown(err, cmd, flagSet)} } + if autoMergeMethod != "" { + if autoMergeDisabled { + err := fmt.Sprintf("cannot use --%s at the same time as --%s", autoMergeMethodFlagLong, autoMergeDisabledFlagLong) + return CommentParseResult{CommentResponse: e.errMarkdown(err, cmd, flagSet)} + } + + if vcsHost != models.Github { + err := fmt.Sprintf("--%s is not currently implemented for %s", autoMergeMethodFlagLong, vcsHost.String()) + return CommentParseResult{CommentResponse: e.errMarkdown(err, cmd, flagSet)} + } + } + return CommentParseResult{ - Command: NewCommentCommand(dir, extraArgs, name, subName, verbose, autoMergeDisabled, workspace, project, policySet, clearPolicyApproval), + Command: NewCommentCommand(dir, extraArgs, name, subName, verbose, autoMergeDisabled, autoMergeMethod, workspace, project, policySet, clearPolicyApproval), } } @@ -383,7 +404,7 @@ func (e *CommentParser) parseArgs(name command.Name, args []string, flagSet *pfl // BuildPlanComment builds a plan comment for the specified args. func (e *CommentParser) BuildPlanComment(repoRelDir string, workspace string, project string, commentArgs []string) string { - flags := e.buildFlags(repoRelDir, workspace, project, false) + flags := e.buildFlags(repoRelDir, workspace, project, false, "") commentFlags := "" if len(commentArgs) > 0 { var flagsWithoutQuotes []string @@ -398,18 +419,18 @@ func (e *CommentParser) BuildPlanComment(repoRelDir string, workspace string, pr } // BuildApplyComment builds an apply comment for the specified args. -func (e *CommentParser) BuildApplyComment(repoRelDir string, workspace string, project string, autoMergeDisabled bool) string { - flags := e.buildFlags(repoRelDir, workspace, project, autoMergeDisabled) +func (e *CommentParser) BuildApplyComment(repoRelDir string, workspace string, project string, autoMergeDisabled bool, autoMergeMethod string) string { + flags := e.buildFlags(repoRelDir, workspace, project, autoMergeDisabled, autoMergeMethod) return fmt.Sprintf("%s %s%s", e.ExecutableName, command.Apply.String(), flags) } // BuildApprovePoliciesComment builds an apply comment for the specified args. func (e *CommentParser) BuildApprovePoliciesComment(repoRelDir string, workspace string, project string) string { - flags := e.buildFlags(repoRelDir, workspace, project, false) + flags := e.buildFlags(repoRelDir, workspace, project, false, "") return fmt.Sprintf("%s %s%s", e.ExecutableName, command.ApprovePolicies.String(), flags) } -func (e *CommentParser) buildFlags(repoRelDir string, workspace string, project string, autoMergeDisabled bool) string { +func (e *CommentParser) buildFlags(repoRelDir string, workspace string, project string, autoMergeDisabled bool, autoMergeMethod string) string { // Add quotes if dir has spaces. 
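Aside: the two guard clauses above are the validation `--auto-merge-method` receives at parse time. A minimal standalone sketch of the same rules follows, with simplified types (`validateAutoMergeMethod`, `package example`, and the plain-string `vcsHost` are illustrative; the real parser returns rendered usage text via `errMarkdown` and compares against `models.Github`):

package example

import "fmt"

// validateAutoMergeMethod mirrors the parser's checks: the flag is rejected
// when combined with --auto-merge-disabled, and on any host other than GitHub.
func validateAutoMergeMethod(method string, autoMergeDisabled bool, vcsHost string) error {
	if method == "" {
		return nil // flag not set; nothing to validate
	}
	if autoMergeDisabled {
		return fmt.Errorf("cannot use --auto-merge-method at the same time as --auto-merge-disabled")
	}
	if vcsHost != "Github" {
		return fmt.Errorf("--auto-merge-method is not currently implemented for %s", vcsHost)
	}
	return nil
}

Per the test table further down, a successful parse round-trips into comments such as `atlantis apply -d dir -w workspace --auto-merge-method squash`.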
if strings.Contains(repoRelDir, " ") { repoRelDir = fmt.Sprintf("%q", repoRelDir) @@ -437,6 +458,9 @@ func (e *CommentParser) buildFlags(repoRelDir string, workspace string, project if autoMergeDisabled { flags = fmt.Sprintf("%s --%s", flags, autoMergeDisabledFlagLong) } + if autoMergeMethod != "" { + flags = fmt.Sprintf("%s --%s %s", flags, autoMergeMethodFlagLong, autoMergeMethod) + } return flags } diff --git a/server/events/comment_parser_test.go b/server/events/comment_parser_test.go index 9c4b19d4f5..88ededcfff 100644 --- a/server/events/comment_parser_test.go +++ b/server/events/comment_parser_test.go @@ -28,6 +28,7 @@ import ( var commentParser = events.CommentParser{ GithubUser: "github-user", GitlabUser: "gitlab-user", + GiteaUser: "gitea-user", ExecutableName: "atlantis", AllowCommands: command.AllCommentCommands, } @@ -36,6 +37,7 @@ func TestNewCommentParser(t *testing.T) { type args struct { githubUser string gitlabUser string + giteaUser string bitbucketUser string azureDevopsUser string executableName string @@ -68,7 +70,7 @@ } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - assert.Equalf(t, tt.want, events.NewCommentParser(tt.args.githubUser, tt.args.gitlabUser, tt.args.bitbucketUser, tt.args.azureDevopsUser, tt.args.executableName, tt.args.allowCommands), "NewCommentParser(%v, %v, %v, %v, %v, %v)", tt.args.githubUser, tt.args.gitlabUser, tt.args.bitbucketUser, tt.args.azureDevopsUser, tt.args.executableName, tt.args.allowCommands) + assert.Equalf(t, tt.want, events.NewCommentParser(tt.args.githubUser, tt.args.gitlabUser, tt.args.giteaUser, tt.args.bitbucketUser, tt.args.azureDevopsUser, tt.args.executableName, tt.args.allowCommands), "NewCommentParser(%v, %v, %v, %v, %v, %v, %v)", tt.args.githubUser, tt.args.gitlabUser, tt.args.giteaUser, tt.args.bitbucketUser, tt.args.azureDevopsUser, tt.args.executableName, tt.args.allowCommands) }) } } @@ -266,6 +268,7 @@ func TestParse_InvalidCommand(t *testing.T) { cp := events.NewCommentParser( "github-user", "gitlab-user", + "gitea-user", "bitbucket-user", "azure-devops-user", "atlantis", @@ -726,6 +729,7 @@ func TestBuildPlanApplyVersionComment(t *testing.T) { workspace string project string autoMergeDisabled bool + autoMergeMethod string commentArgs []string expPlanFlags string expApplyFlags string @@ -821,6 +825,16 @@ expApplyFlags: "-d dir -w workspace --auto-merge-disabled", expVersionFlags: "-d dir -w workspace", }, + { + repoRelDir: "dir", + workspace: "workspace", + project: "", + autoMergeMethod: "squash", + commentArgs: []string{`"arg1"`, `"arg2"`, `arg3`}, + expPlanFlags: "-d dir -w workspace -- arg1 arg2 arg3", + expApplyFlags: "-d dir -w workspace --auto-merge-method squash", + expVersionFlags: "-d dir -w workspace", + }, } for _, c := range cases { @@ -831,7 +845,7 @@ actComment := commentParser.BuildPlanComment(c.repoRelDir, c.workspace, c.project, c.commentArgs) Equals(t, fmt.Sprintf("atlantis plan %s", c.expPlanFlags), actComment) case command.Apply: - actComment := commentParser.BuildApplyComment(c.repoRelDir, c.workspace, c.project, c.autoMergeDisabled) + actComment := commentParser.BuildApplyComment(c.repoRelDir, c.workspace, c.project, c.autoMergeDisabled, c.autoMergeMethod) Equals(t, fmt.Sprintf("atlantis apply %s", c.expApplyFlags), actComment) } } @@ -1017,14 +1031,18 @@ var PlanUsage = `Usage of plan: ` var ApplyUsage = `Usage of apply: - --auto-merge-disabled
Disable automerge after apply. - -d, --dir string Apply the plan for this directory, relative to root of - repo, ex. 'child/dir'. - -p, --project string Apply the plan for this project. Refers to the name of - the project configured in a repo config file. Cannot - be used at same time as workspace or dir flags. - --verbose Append Atlantis log to comment. - -w, --workspace string Apply the plan for this Terraform workspace. + --auto-merge-disabled Disable automerge after apply. + --auto-merge-method string Specifies the merge method for the VCS if + automerge is enabled. (Currently only implemented + for GitHub) + -d, --dir string Apply the plan for this directory, relative to + root of repo, ex. 'child/dir'. + -p, --project string Apply the plan for this project. Refers to the + name of the project configured in a repo config + file. Cannot be used at same time as workspace or + dir flags. + --verbose Append Atlantis log to comment. + -w, --workspace string Apply the plan for this Terraform workspace. ` var ApprovePolicyUsage = `Usage of approve_policies: diff --git a/server/events/commit_status_updater.go b/server/events/commit_status_updater.go index 07c97b184b..a05b7ef808 100644 --- a/server/events/commit_status_updater.go +++ b/server/events/commit_status_updater.go @@ -20,24 +20,25 @@ import ( "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/vcs" + "github.com/runatlantis/atlantis/server/logging" "golang.org/x/text/cases" "golang.org/x/text/language" ) -//go:generate pegomock generate --package mocks -o mocks/mock_commit_status_updater.go CommitStatusUpdater +//go:generate pegomock generate github.com/runatlantis/atlantis/server/events --package mocks -o mocks/mock_commit_status_updater.go CommitStatusUpdater // CommitStatusUpdater updates the status of a commit with the VCS host. We set // the status to signify whether the plan/apply succeeds. type CommitStatusUpdater interface { // UpdateCombined updates the combined status of the head commit of pull. // A combined status represents all the projects modified in the pull. - UpdateCombined(repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName command.Name) error + UpdateCombined(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName command.Name) error // UpdateCombinedCount updates the combined status to reflect the // numSuccess out of numTotal. 
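The signature change here is mechanical but worth spelling out: the per-request logger becomes the first argument of every updater method instead of living on the struct, so status updates log into the request's own context. A compile-level sketch of the new call shape against the post-change interface (`markPlanPending` and `package example` are illustrative names; the imports are the real Atlantis packages):

package example

import (
	"github.com/runatlantis/atlantis/server/events"
	"github.com/runatlantis/atlantis/server/events/command"
	"github.com/runatlantis/atlantis/server/events/models"
	"github.com/runatlantis/atlantis/server/logging"
)

// markPlanPending shows the new call shape: the caller supplies the logger
// explicitly rather than relying on a logger field inside the updater.
func markPlanPending(log logging.SimpleLogging, s events.CommitStatusUpdater, repo models.Repo, pull models.PullRequest) error {
	return s.UpdateCombined(log, repo, pull, models.PendingCommitStatus, command.Plan)
}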
- UpdateCombinedCount(repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName command.Name, numSuccess int, numTotal int) error + UpdateCombinedCount(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName command.Name, numSuccess int, numTotal int) error - UpdatePreWorkflowHook(pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, url string) error - UpdatePostWorkflowHook(pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, url string) error + UpdatePreWorkflowHook(logger logging.SimpleLogging, pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, url string) error + UpdatePostWorkflowHook(logger logging.SimpleLogging, pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, url string) error } // DefaultCommitStatusUpdater implements CommitStatusUpdater. @@ -51,7 +52,7 @@ type DefaultCommitStatusUpdater struct { // cause runtime.StatusUpdater is extracted for resolving circular dependency var _ runtime.StatusUpdater = (*DefaultCommitStatusUpdater)(nil) -func (d *DefaultCommitStatusUpdater) UpdateCombined(repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName command.Name) error { +func (d *DefaultCommitStatusUpdater) UpdateCombined(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName command.Name) error { src := fmt.Sprintf("%s/%s", d.StatusName, cmdName.String()) var descripWords string switch status { @@ -62,10 +63,10 @@ func (d *DefaultCommitStatusUpdater) UpdateCombined(repo models.Repo, pull model case models.SuccessCommitStatus: descripWords = genProjectStatusDescription(cmdName.String(), "succeeded.") } - return d.Client.UpdateStatus(repo, pull, status, src, descripWords, "") + return d.Client.UpdateStatus(logger, repo, pull, status, src, descripWords, "") } -func (d *DefaultCommitStatusUpdater) UpdateCombinedCount(repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName command.Name, numSuccess int, numTotal int) error { +func (d *DefaultCommitStatusUpdater) UpdateCombinedCount(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName command.Name, numSuccess int, numTotal int) error { src := fmt.Sprintf("%s/%s", d.StatusName, cmdName.String()) cmdVerb := "unknown" @@ -78,7 +79,7 @@ func (d *DefaultCommitStatusUpdater) UpdateCombinedCount(repo models.Repo, pull cmdVerb = "applied" } - return d.Client.UpdateStatus(repo, pull, status, src, fmt.Sprintf("%d/%d projects %s successfully.", numSuccess, numTotal, cmdVerb), "") + return d.Client.UpdateStatus(logger, repo, pull, status, src, fmt.Sprintf("%d/%d projects %s successfully.", numSuccess, numTotal, cmdVerb), "") } func (d *DefaultCommitStatusUpdater) UpdateProject(ctx command.ProjectContext, cmdName command.Name, status models.CommitStatus, url string, result *command.ProjectResult) error { @@ -100,22 +101,22 @@ func (d *DefaultCommitStatusUpdater) UpdateProject(ctx command.ProjectContext, c descripWords = genProjectStatusDescription(cmdName.String(), "succeeded.") } } - return d.Client.UpdateStatus(ctx.BaseRepo, ctx.Pull, status, src, descripWords, url) + return d.Client.UpdateStatus(ctx.Log, ctx.BaseRepo, ctx.Pull, status, src, descripWords, url) } func genProjectStatusDescription(cmdName, description 
string) string { return fmt.Sprintf("%s %s", cases.Title(language.English).String(cmdName), description) } -func (d *DefaultCommitStatusUpdater) UpdatePreWorkflowHook(pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, url string) error { - return d.updateWorkflowHook(pull, status, hookDescription, runtimeDescription, "pre_workflow_hook", url) +func (d *DefaultCommitStatusUpdater) UpdatePreWorkflowHook(log logging.SimpleLogging, pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, url string) error { + return d.updateWorkflowHook(log, pull, status, hookDescription, runtimeDescription, "pre_workflow_hook", url) } -func (d *DefaultCommitStatusUpdater) UpdatePostWorkflowHook(pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, url string) error { - return d.updateWorkflowHook(pull, status, hookDescription, runtimeDescription, "post_workflow_hook", url) +func (d *DefaultCommitStatusUpdater) UpdatePostWorkflowHook(log logging.SimpleLogging, pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, url string) error { + return d.updateWorkflowHook(log, pull, status, hookDescription, runtimeDescription, "post_workflow_hook", url) } -func (d *DefaultCommitStatusUpdater) updateWorkflowHook(pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, workflowType string, url string) error { +func (d *DefaultCommitStatusUpdater) updateWorkflowHook(log logging.SimpleLogging, pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, workflowType string, url string) error { src := fmt.Sprintf("%s/%s: %s", d.StatusName, workflowType, hookDescription) var descripWords string @@ -132,5 +133,5 @@ func (d *DefaultCommitStatusUpdater) updateWorkflowHook(pull models.PullRequest, } } - return d.Client.UpdateStatus(pull.BaseRepo, pull, status, src, descripWords, url) + return d.Client.UpdateStatus(log, pull.BaseRepo, pull, status, src, descripWords, url) } diff --git a/server/events/commit_status_updater_test.go b/server/events/commit_status_updater_test.go index a84f1ced17..1fe059f203 100644 --- a/server/events/commit_status_updater_test.go +++ b/server/events/commit_status_updater_test.go @@ -22,10 +22,12 @@ import ( "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/vcs/mocks" + "github.com/runatlantis/atlantis/server/logging" . 
"github.com/runatlantis/atlantis/testing" ) func TestUpdateCombined(t *testing.T) { + logger := logging.NewNoopLogger(t) cases := []struct { status models.CommitStatus command command.Name @@ -68,16 +70,17 @@ func TestUpdateCombined(t *testing.T) { RegisterMockTestingT(t) client := mocks.NewMockClient() s := events.DefaultCommitStatusUpdater{Client: client, StatusName: "atlantis"} - err := s.UpdateCombined(models.Repo{}, models.PullRequest{}, c.status, c.command) + err := s.UpdateCombined(logger, models.Repo{}, models.PullRequest{}, c.status, c.command) Ok(t, err) expSrc := fmt.Sprintf("atlantis/%s", c.command) - client.VerifyWasCalledOnce().UpdateStatus(models.Repo{}, models.PullRequest{}, c.status, expSrc, c.expDescrip, "") + client.VerifyWasCalledOnce().UpdateStatus(logger, models.Repo{}, models.PullRequest{}, c.status, expSrc, c.expDescrip, "") }) } } func TestUpdateCombinedCount(t *testing.T) { + logger := logging.NewNoopLogger(t) cases := []struct { status models.CommitStatus command command.Name @@ -134,11 +137,11 @@ func TestUpdateCombinedCount(t *testing.T) { RegisterMockTestingT(t) client := mocks.NewMockClient() s := events.DefaultCommitStatusUpdater{Client: client, StatusName: "atlantis-test"} - err := s.UpdateCombinedCount(models.Repo{}, models.PullRequest{}, c.status, c.command, c.numSuccess, c.numTotal) + err := s.UpdateCombinedCount(logger, models.Repo{}, models.PullRequest{}, c.status, c.command, c.numSuccess, c.numTotal) Ok(t, err) expSrc := fmt.Sprintf("%s/%s", s.StatusName, c.command) - client.VerifyWasCalledOnce().UpdateStatus(models.Repo{}, models.PullRequest{}, c.status, expSrc, c.expDescrip, "") + client.VerifyWasCalledOnce().UpdateStatus(logger, models.Repo{}, models.PullRequest{}, c.status, expSrc, c.expDescrip, "") }) } } @@ -177,7 +180,9 @@ func TestDefaultCommitStatusUpdater_UpdateProjectSrc(t *testing.T) { Workspace: c.workspace, }, command.Plan, models.PendingCommitStatus, "url", nil) Ok(t, err) - client.VerifyWasCalledOnce().UpdateStatus(models.Repo{}, models.PullRequest{}, models.PendingCommitStatus, c.expSrc, "Plan in progress...", "url") + client.VerifyWasCalledOnce().UpdateStatus( + Any[logging.SimpleLogging](), Eq(models.Repo{}), Eq(models.PullRequest{}), Eq(models.PendingCommitStatus), Eq(c.expSrc), + Eq("Plan in progress..."), Eq("url")) }) } } @@ -240,7 +245,8 @@ func TestDefaultCommitStatusUpdater_UpdateProject(t *testing.T) { Workspace: "default", }, c.cmd, c.status, "url", c.result) Ok(t, err) - client.VerifyWasCalledOnce().UpdateStatus(models.Repo{}, models.PullRequest{}, c.status, fmt.Sprintf("atlantis/%s: ./default", c.cmd.String()), c.expDescrip, "url") + client.VerifyWasCalledOnce().UpdateStatus(Any[logging.SimpleLogging](), Eq(models.Repo{}), Eq(models.PullRequest{}), Eq(c.status), + Eq(fmt.Sprintf("atlantis/%s: ./default", c.cmd.String())), Eq(c.expDescrip), Eq("url")) }) } } @@ -255,6 +261,6 @@ func TestDefaultCommitStatusUpdater_UpdateProjectCustomStatusName(t *testing.T) Workspace: "default", }, command.Apply, models.SuccessCommitStatus, "url", nil) Ok(t, err) - client.VerifyWasCalledOnce().UpdateStatus(models.Repo{}, models.PullRequest{}, - models.SuccessCommitStatus, "custom/apply: ./default", "Apply succeeded.", "url") + client.VerifyWasCalledOnce().UpdateStatus(Any[logging.SimpleLogging](), Eq(models.Repo{}), Eq(models.PullRequest{}), + Eq(models.SuccessCommitStatus), Eq("custom/apply: ./default"), Eq("Apply succeeded."), Eq("url")) } diff --git a/server/events/delete_lock_command.go b/server/events/delete_lock_command.go index 
89016503fb..1c9abcdda0 100644 --- a/server/events/delete_lock_command.go +++ b/server/events/delete_lock_command.go @@ -6,25 +6,24 @@ import ( "github.com/runatlantis/atlantis/server/logging" ) -//go:generate pegomock generate --package mocks -o mocks/mock_delete_lock_command.go DeleteLockCommand +//go:generate pegomock generate github.com/runatlantis/atlantis/server/events --package mocks -o mocks/mock_delete_lock_command.go DeleteLockCommand // DeleteLockCommand is the first step after a command request has been parsed. type DeleteLockCommand interface { - DeleteLock(id string) (*models.ProjectLock, error) - DeleteLocksByPull(repoFullName string, pullNum int) (int, error) + DeleteLock(logger logging.SimpleLogging, id string) (*models.ProjectLock, error) + DeleteLocksByPull(logger logging.SimpleLogging, repoFullName string, pullNum int) (int, error) } // DefaultDeleteLockCommand deletes a specific lock after a request from the LocksController. type DefaultDeleteLockCommand struct { Locker locking.Locker - Logger logging.SimpleLogging WorkingDir WorkingDir WorkingDirLocker WorkingDirLocker Backend locking.Backend } // DeleteLock handles deleting the lock at id -func (l *DefaultDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, error) { +func (l *DefaultDeleteLockCommand) DeleteLock(logger logging.SimpleLogging, id string) (*models.ProjectLock, error) { lock, err := l.Locker.Unlock(id) if err != nil { return nil, err @@ -33,9 +32,9 @@ func (l *DefaultDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, e return nil, nil } - removeErr := l.WorkingDir.DeletePlan(lock.Pull.BaseRepo, lock.Pull, lock.Workspace, lock.Project.Path, lock.Project.ProjectName) + removeErr := l.WorkingDir.DeletePlan(logger, lock.Pull.BaseRepo, lock.Pull, lock.Workspace, lock.Project.Path, lock.Project.ProjectName) if removeErr != nil { - l.Logger.Warn("Failed to delete plan: %s", removeErr) + logger.Warn("Failed to delete plan: %s", removeErr) return nil, removeErr } @@ -43,23 +42,23 @@ func (l *DefaultDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, e } // DeleteLocksByPull handles deleting all locks for the pull request -func (l *DefaultDeleteLockCommand) DeleteLocksByPull(repoFullName string, pullNum int) (int, error) { +func (l *DefaultDeleteLockCommand) DeleteLocksByPull(logger logging.SimpleLogging, repoFullName string, pullNum int) (int, error) { locks, err := l.Locker.UnlockByPull(repoFullName, pullNum) numLocks := len(locks) if err != nil { return numLocks, err } if numLocks == 0 { - l.Logger.Debug("No locks found for repo '%v', pull request: %v", repoFullName, pullNum) + logger.Debug("No locks found for repo '%v', pull request: %v", repoFullName, pullNum) return numLocks, nil } for i := 0; i < numLocks; i++ { lock := locks[i] - err := l.WorkingDir.DeletePlan(lock.Pull.BaseRepo, lock.Pull, lock.Workspace, lock.Project.Path, lock.Project.ProjectName) + err := l.WorkingDir.DeletePlan(logger, lock.Pull.BaseRepo, lock.Pull, lock.Workspace, lock.Project.Path, lock.Project.ProjectName) if err != nil { - l.Logger.Warn("Failed to delete plan: %s", err) + logger.Warn("Failed to delete plan: %s", err) return numLocks, err } } diff --git a/server/events/delete_lock_command_test.go b/server/events/delete_lock_command_test.go index 75ffe0488b..2e652770b9 100644 --- a/server/events/delete_lock_command_test.go +++ b/server/events/delete_lock_command_test.go @@ -15,33 +15,30 @@ import ( func TestDeleteLock_LockerErr(t *testing.T) { t.Log("If there is an error retrieving the lock, we return 
the error") + logger := logging.NewNoopLogger(t) RegisterMockTestingT(t) l := lockmocks.NewMockLocker() When(l.Unlock("id")).ThenReturn(nil, errors.New("err")) - dlc := events.DefaultDeleteLockCommand{ - Locker: l, - Logger: logging.NewNoopLogger(t), - } - _, err := dlc.DeleteLock("id") + dlc := events.DefaultDeleteLockCommand{Locker: l} + _, err := dlc.DeleteLock(logger, "id") ErrEquals(t, "err", err) } func TestDeleteLock_None(t *testing.T) { t.Log("If there is no lock at that ID we return nil") + logger := logging.NewNoopLogger(t) RegisterMockTestingT(t) l := lockmocks.NewMockLocker() When(l.Unlock("id")).ThenReturn(nil, nil) - dlc := events.DefaultDeleteLockCommand{ - Locker: l, - Logger: logging.NewNoopLogger(t), - } - lock, err := dlc.DeleteLock("id") + dlc := events.DefaultDeleteLockCommand{Locker: l} + lock, err := dlc.DeleteLock(logger, "id") Ok(t, err) Assert(t, lock == nil, "lock was not nil") } func TestDeleteLock_Success(t *testing.T) { t.Log("Delete lock deletes successfully the plan file") + logger := logging.NewNoopLogger(t) RegisterMockTestingT(t) l := lockmocks.NewMockLocker() When(l.Unlock("id")).ThenReturn(&models.ProjectLock{}, nil) @@ -66,19 +63,20 @@ func TestDeleteLock_Success(t *testing.T) { Ok(t, err) dlc := events.DefaultDeleteLockCommand{ Locker: l, - Logger: logging.NewNoopLogger(t), Backend: db, WorkingDirLocker: workingDirLocker, WorkingDir: workingDir, } - lock, err := dlc.DeleteLock("id") + lock, err := dlc.DeleteLock(logger, "id") Ok(t, err) Assert(t, lock != nil, "lock was nil") - workingDir.VerifyWasCalledOnce().DeletePlan(pull.BaseRepo, pull, workspace, path, projectName) + workingDir.VerifyWasCalledOnce().DeletePlan(Any[logging.SimpleLogging](), Eq(pull.BaseRepo), Eq(pull), Eq(workspace), + Eq(path), Eq(projectName)) } func TestDeleteLocksByPull_LockerErr(t *testing.T) { t.Log("If there is an error retrieving the lock, returned a failed status") + logger := logging.NewNoopLogger(t) repoName := "reponame" pullNum := 2 RegisterMockTestingT(t) @@ -87,16 +85,17 @@ func TestDeleteLocksByPull_LockerErr(t *testing.T) { When(l.UnlockByPull(repoName, pullNum)).ThenReturn(nil, errors.New("err")) dlc := events.DefaultDeleteLockCommand{ Locker: l, - Logger: logging.NewNoopLogger(t), WorkingDir: workingDir, } - _, err := dlc.DeleteLocksByPull(repoName, pullNum) + _, err := dlc.DeleteLocksByPull(logger, repoName, pullNum) ErrEquals(t, "err", err) - workingDir.VerifyWasCalled(Never()).DeletePlan(Any[models.Repo](), Any[models.PullRequest](), Any[string](), Any[string](), Any[string]()) + workingDir.VerifyWasCalled(Never()).DeletePlan(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string](), Any[string](), Any[string]()) } func TestDeleteLocksByPull_None(t *testing.T) { t.Log("If there is no lock at that ID there is no error") + logger := logging.NewNoopLogger(t) repoName := "reponame" pullNum := 2 RegisterMockTestingT(t) @@ -105,16 +104,17 @@ func TestDeleteLocksByPull_None(t *testing.T) { When(l.UnlockByPull(repoName, pullNum)).ThenReturn([]models.ProjectLock{}, nil) dlc := events.DefaultDeleteLockCommand{ Locker: l, - Logger: logging.NewNoopLogger(t), WorkingDir: workingDir, } - _, err := dlc.DeleteLocksByPull(repoName, pullNum) + _, err := dlc.DeleteLocksByPull(logger, repoName, pullNum) Ok(t, err) - workingDir.VerifyWasCalled(Never()).DeletePlan(Any[models.Repo](), Any[models.PullRequest](), Any[string](), Any[string](), Any[string]()) + workingDir.VerifyWasCalled(Never()).DeletePlan(Any[logging.SimpleLogging](), 
Any[models.Repo](), Any[models.PullRequest](), + Any[string](), Any[string](), Any[string]()) } func TestDeleteLocksByPull_SingleSuccess(t *testing.T) { t.Log("If a single lock is successfully deleted") + logger := logging.NewNoopLogger(t) repoName := "reponame" pullNum := 2 path := "." @@ -142,16 +142,17 @@ func TestDeleteLocksByPull_SingleSuccess(t *testing.T) { ) dlc := events.DefaultDeleteLockCommand{ Locker: l, - Logger: logging.NewNoopLogger(t), WorkingDir: workingDir, } - _, err := dlc.DeleteLocksByPull(repoName, pullNum) + _, err := dlc.DeleteLocksByPull(logger, repoName, pullNum) Ok(t, err) - workingDir.VerifyWasCalled(Once()).DeletePlan(pull.BaseRepo, pull, workspace, path, projectName) + workingDir.VerifyWasCalled(Once()).DeletePlan(Any[logging.SimpleLogging](), Eq(pull.BaseRepo), Eq(pull), Eq(workspace), + Eq(path), Eq(projectName)) } func TestDeleteLocksByPull_MultipleSuccess(t *testing.T) { t.Log("If multiple locks are successfully deleted") + logger := logging.NewNoopLogger(t) repoName := "reponame" pullNum := 2 path1 := "path1" @@ -187,11 +188,10 @@ func TestDeleteLocksByPull_MultipleSuccess(t *testing.T) { ) dlc := events.DefaultDeleteLockCommand{ Locker: l, - Logger: logging.NewNoopLogger(t), WorkingDir: workingDir, } - _, err := dlc.DeleteLocksByPull(repoName, pullNum) + _, err := dlc.DeleteLocksByPull(logger, repoName, pullNum) Ok(t, err) - workingDir.VerifyWasCalled(Once()).DeletePlan(pull.BaseRepo, pull, workspace, path1, projectName) - workingDir.VerifyWasCalled(Once()).DeletePlan(pull.BaseRepo, pull, workspace, path2, projectName) + workingDir.VerifyWasCalled(Once()).DeletePlan(logger, pull.BaseRepo, pull, workspace, path1, projectName) + workingDir.VerifyWasCalled(Once()).DeletePlan(logger, pull.BaseRepo, pull, workspace, path2, projectName) } diff --git a/server/events/event_parser.go b/server/events/event_parser.go index 6aa043f794..a6b4b363ac 100644 --- a/server/events/event_parser.go +++ b/server/events/event_parser.go @@ -20,8 +20,10 @@ import ( "path" "strings" + giteasdk "code.gitea.io/sdk/gitea" + "github.com/go-playground/validator/v10" - "github.com/google/go-github/v58/github" + "github.com/google/go-github/v65/github" lru "github.com/hashicorp/golang-lru/v2" "github.com/mcdafydd/go-azuredevops/azuredevops" "github.com/pkg/errors" @@ -29,6 +31,8 @@ import ( "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/vcs/bitbucketcloud" "github.com/runatlantis/atlantis/server/events/vcs/bitbucketserver" + "github.com/runatlantis/atlantis/server/events/vcs/gitea" + "github.com/runatlantis/atlantis/server/logging" "github.com/xanzy/go-gitlab" ) @@ -124,6 +128,8 @@ type CommentCommand struct { SubName string // AutoMergeDisabled is true if the command should not automerge after apply. AutoMergeDisabled bool + // AutoMergeMethod specifies the merge method for the VCS if automerge is enabled. + AutoMergeMethod string // Verbose is true if the command should output verbosely. Verbose bool // Workspace is the name of the Terraform workspace to run the command in. @@ -173,11 +179,11 @@ func (c CommentCommand) IsAutoplan() bool { // String returns a string representation of the command.
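// For example, with the reworked format a plan command renders as:
//   command="plan", verbose=true, dir="mydir", workspace="myworkspace", project="myproject", policyset="", auto-merge-disabled=false, auto-merge-method=, clear-policy-approval=false, flags="flag1,flag2"
// (taken from the updated TestCommentCommand_String expectation further down;
// note auto-merge-method is rendered unquoted, so it prints empty when unset).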
func (c CommentCommand) String() string { - return fmt.Sprintf("command=%q verbose=%t dir=%q workspace=%q project=%q policyset=%q, clear-policy-approval=%t, flags=%q", c.Name.String(), c.Verbose, c.RepoRelDir, c.Workspace, c.ProjectName, c.PolicySet, c.ClearPolicyApproval, strings.Join(c.Flags, ",")) + return fmt.Sprintf("command=%q, verbose=%t, dir=%q, workspace=%q, project=%q, policyset=%q, auto-merge-disabled=%t, auto-merge-method=%s, clear-policy-approval=%t, flags=%q", c.Name.String(), c.Verbose, c.RepoRelDir, c.Workspace, c.ProjectName, c.PolicySet, c.AutoMergeDisabled, c.AutoMergeMethod, c.ClearPolicyApproval, strings.Join(c.Flags, ",")) } // NewCommentCommand constructs a CommentCommand, setting all missing fields to defaults. -func NewCommentCommand(repoRelDir string, flags []string, name command.Name, subName string, verbose, autoMergeDisabled bool, workspace string, project string, policySet string, clearPolicyApproval bool) *CommentCommand { +func NewCommentCommand(repoRelDir string, flags []string, name command.Name, subName string, verbose, autoMergeDisabled bool, autoMergeMethod string, workspace string, project string, policySet string, clearPolicyApproval bool) *CommentCommand { // If repoRelDir was empty we want to keep it that way to indicate that it // wasn't specified in the comment. if repoRelDir != "" { @@ -194,13 +200,14 @@ func NewCommentCommand(repoRelDir string, flags []string, name command.Name, sub Verbose: verbose, Workspace: workspace, AutoMergeDisabled: autoMergeDisabled, + AutoMergeMethod: autoMergeMethod, ProjectName: project, PolicySet: policySet, ClearPolicyApproval: clearPolicyApproval, } } -//go:generate pegomock generate --package mocks -o mocks/mock_event_parsing.go EventParsing +//go:generate pegomock generate github.com/runatlantis/atlantis/server/events --package mocks -o mocks/mock_event_parsing.go EventParsing // EventParsing parses webhook events from different VCS hosts into their // respective Atlantis models. @@ -210,7 +217,7 @@ type EventParsing interface { // baseRepo is the repo that the pull request will be merged into. // user is the pull request author. // pullNum is the number of the pull request that triggered the webhook. - ParseGithubIssueCommentEvent(comment *github.IssueCommentEvent) ( + ParseGithubIssueCommentEvent(logger logging.SimpleLogging, comment *github.IssueCommentEvent) ( baseRepo models.Repo, user models.User, pullNum int, err error) // ParseGithubPull parses the response from the GitHub API endpoint (not @@ -218,7 +225,7 @@ type EventParsing interface { // pull is the parsed pull request. // baseRepo is the repo the pull request will be merged into. // headRepo is the repo the pull request branch is from. - ParseGithubPull(ghPull *github.PullRequest) ( + ParseGithubPull(logger logging.SimpleLogging, ghPull *github.PullRequest) ( pull models.PullRequest, baseRepo models.Repo, headRepo models.Repo, err error) // ParseGithubPullEvent parses GitHub pull request events. @@ -227,7 +234,7 @@ type EventParsing interface { // baseRepo is the repo the pull request will be merged into. // headRepo is the repo the pull request branch is from. // user is the pull request author. 
- ParseGithubPullEvent(pullEvent *github.PullRequestEvent) ( + ParseGithubPullEvent(logger logging.SimpleLogging, pullEvent *github.PullRequestEvent) ( pull models.PullRequest, pullEventType models.PullRequestEventType, baseRepo models.Repo, headRepo models.Repo, user models.User, err error) @@ -336,6 +343,14 @@ type EventParsing interface { // ParseAzureDevopsRepo parses the response from the Azure DevOps API endpoint that // returns a repo into the Atlantis model. ParseAzureDevopsRepo(adRepo *azuredevops.GitRepository) (models.Repo, error) + + ParseGiteaPullRequestEvent(event giteasdk.PullRequest) ( + pull models.PullRequest, pullEventType models.PullRequestEventType, + baseRepo models.Repo, headRepo models.Repo, user models.User, err error) + + ParseGiteaIssueCommentEvent(event gitea.GiteaIssueCommentPayload) (baseRepo models.Repo, user models.User, pullNum int, err error) + + ParseGiteaPull(pull *giteasdk.PullRequest) (pullModel models.PullRequest, baseRepo models.Repo, headRepo models.Repo, err error) } // EventParser parses VCS events. @@ -344,6 +359,8 @@ type EventParser struct { GithubToken string GitlabUser string GitlabToken string + GiteaUser string + GiteaToken string AllowDraftPRs bool BitbucketUser string BitbucketToken string @@ -356,6 +373,8 @@ func (e *EventParser) ParseAPIPlanRequest(vcsHostType models.VCSHostType, repoFu switch vcsHostType { case models.Github: return models.NewRepo(vcsHostType, repoFullName, cloneURL, e.GithubUser, e.GithubToken) + case models.Gitea: + return models.NewRepo(vcsHostType, repoFullName, cloneURL, e.GiteaUser, e.GiteaToken) case models.Gitlab: return models.NewRepo(vcsHostType, repoFullName, cloneURL, e.GitlabUser, e.GitlabToken) } @@ -471,7 +490,7 @@ func (e *EventParser) ParseBitbucketCloudPullEvent(body []byte) (pull models.Pul // ParseGithubIssueCommentEvent parses GitHub pull request comment events. // See EventParsing for return value docs. -func (e *EventParser) ParseGithubIssueCommentEvent(comment *github.IssueCommentEvent) (baseRepo models.Repo, user models.User, pullNum int, err error) { +func (e *EventParser) ParseGithubIssueCommentEvent(logger logging.SimpleLogging, comment *github.IssueCommentEvent) (baseRepo models.Repo, user models.User, pullNum int, err error) { baseRepo, err = e.ParseGithubRepo(comment.Repo) if err != nil { return @@ -494,12 +513,12 @@ func (e *EventParser) ParseGithubIssueCommentEvent(comment *github.IssueCommentE // ParseGithubPullEvent parses GitHub pull request events. // See EventParsing for return value docs. -func (e *EventParser) ParseGithubPullEvent(pullEvent *github.PullRequestEvent) (pull models.PullRequest, pullEventType models.PullRequestEventType, baseRepo models.Repo, headRepo models.Repo, user models.User, err error) { +func (e *EventParser) ParseGithubPullEvent(logger logging.SimpleLogging, pullEvent *github.PullRequestEvent) (pull models.PullRequest, pullEventType models.PullRequestEventType, baseRepo models.Repo, headRepo models.Repo, user models.User, err error) { if pullEvent.PullRequest == nil { err = errors.New("pull_request is null") return } - pull, baseRepo, headRepo, err = e.ParseGithubPull(pullEvent.PullRequest) + pull, baseRepo, headRepo, err = e.ParseGithubPull(logger, pullEvent.PullRequest) if err != nil { return } @@ -543,7 +562,7 @@ func (e *EventParser) ParseGithubPullEvent(pullEvent *github.PullRequestEvent) ( // ParseGithubPull parses the response from the GitHub API endpoint (not // from a webhook) that returns a pull request. // See EventParsing for return value docs. 
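With Gitea wired in above (the new GiteaUser/GiteaToken fields and ParseGitea* interface methods), every host still funnels into the same Atlantis repo model. A minimal sketch of that normalization step, mirroring the new ParseGiteaRepo below (`toAtlantisRepo` and `package example` are illustrative names; models.NewRepo and models.Gitea are real):

package example

import "github.com/runatlantis/atlantis/server/events/models"

// toAtlantisRepo maps a Gitea repository's identifying fields onto the shared
// Atlantis repo model via the same constructor used for the other VCS hosts.
func toAtlantisRepo(fullName, cloneURL, giteaUser, giteaToken string) (models.Repo, error) {
	return models.NewRepo(models.Gitea, fullName, cloneURL, giteaUser, giteaToken)
}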
-func (e *EventParser) ParseGithubPull(pull *github.PullRequest) (pullModel models.PullRequest, baseRepo models.Repo, headRepo models.Repo, err error) { +func (e *EventParser) ParseGithubPull(logger logging.SimpleLogging, pull *github.PullRequest) (pullModel models.PullRequest, baseRepo models.Repo, headRepo models.Repo, err error) { commit := pull.Head.GetSHA() if commit == "" { err = errors.New("head.sha is null") @@ -610,6 +629,13 @@ func (e *EventParser) ParseGithubRepo(ghRepo *github.Repository) (models.Repo, e return models.NewRepo(models.Github, ghRepo.GetFullName(), ghRepo.GetCloneURL(), e.GithubUser, e.GithubToken) } +// ParseGiteaRepo parses the response from the Gitea API endpoint that +// returns a repo into the Atlantis model. +// See EventParsing for return value docs. +func (e *EventParser) ParseGiteaRepo(repo giteasdk.Repository) (models.Repo, error) { + return models.NewRepo(models.Gitea, repo.FullName, repo.CloneURL, e.GiteaUser, e.GiteaToken) +} + // ParseGitlabMergeRequestUpdateEvent dives deeper into Gitlab merge request update events func (e *EventParser) ParseGitlabMergeRequestUpdateEvent(event gitlab.MergeEvent) models.PullRequestEventType { // New commit to opened MR @@ -702,6 +728,27 @@ func (e *EventParser) ParseGitlabMergeRequestCommentEvent(event gitlab.MergeComm return } +func (e *EventParser) ParseGiteaIssueCommentEvent(comment gitea.GiteaIssueCommentPayload) (baseRepo models.Repo, user models.User, pullNum int, err error) { + baseRepo, err = e.ParseGiteaRepo(comment.Repository) + if err != nil { + return + } + if comment.Comment.Body == "" || comment.Comment.Poster.UserName == "" { + err = errors.New("comment.user.login is null") + return + } + commenterUsername := comment.Comment.Poster.UserName + user = models.User{ + Username: commenterUsername, + } + pullNum = int(comment.Issue.Index) + if pullNum == 0 { + err = errors.New("issue.number is null") + return + } + return +} + // ParseGitlabMergeRequest parses the merge requests and returns a pull request // model. We require passing in baseRepo because we can't get this information // from the merge request. The only caller of this function already has that @@ -988,3 +1035,121 @@ func (e *EventParser) ParseAzureDevopsRepo(adRepo *azuredevops.GitRepository) (m fullName := fmt.Sprintf("%s/%s/%s", owner, project, repo) return models.NewRepo(models.AzureDevops, fullName, cloneURL, e.AzureDevopsUser, e.AzureDevopsToken) } + +func (e *EventParser) ParseGiteaPullRequestEvent(event giteasdk.PullRequest) (models.PullRequest, models.PullRequestEventType, models.Repo, models.Repo, models.User, error) { + var pullEventType models.PullRequestEventType + + // Determine the event type based on the state of the pull request and whether it's merged. + switch { + case event.State == giteasdk.StateOpen: + pullEventType = models.OpenedPullEvent + case event.HasMerged: + pullEventType = models.ClosedPullEvent + default: + pullEventType = models.OtherPullEvent + } + + // Parse the base repository. + baseRepo, err := models.NewRepo( + models.Gitea, + event.Base.Repository.FullName, + event.Base.Repository.CloneURL, + e.GiteaUser, + e.GiteaToken, + ) + if err != nil { + return models.PullRequest{}, models.OtherPullEvent, models.Repo{}, models.Repo{}, models.User{}, err + } + + // Parse the head repository. 
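	// The head side is parsed independently of the base because a pull request
	// can come from a fork: Head.Repository may carry a different FullName and
	// CloneURL than Base.Repository, and each must be normalized into its own
	// Atlantis repo model.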
+ headRepo, err := models.NewRepo( + models.Gitea, + event.Head.Repository.FullName, + event.Head.Repository.CloneURL, + e.GiteaUser, + e.GiteaToken, + ) + if err != nil { + return models.PullRequest{}, models.OtherPullEvent, models.Repo{}, models.Repo{}, models.User{}, err + } + + // Construct the pull request model. + pull := models.PullRequest{ + Num: int(event.Index), + URL: event.HTMLURL, + HeadCommit: event.Head.Sha, + HeadBranch: (*event.Head).Ref, + BaseBranch: event.Base.Ref, + Author: event.Poster.UserName, + BaseRepo: baseRepo, + } + + // Parse the user who made the pull request. + user := models.User{ + Username: event.Poster.UserName, + } + return pull, pullEventType, baseRepo, headRepo, user, nil +} + +// ParseGiteaPull parses the response from the Gitea API endpoint (not +// from a webhook) that returns a pull request. +// See EventParsing for return value docs. +func (e *EventParser) ParseGiteaPull(pull *giteasdk.PullRequest) (pullModel models.PullRequest, baseRepo models.Repo, headRepo models.Repo, err error) { + commit := pull.Head.Sha + if commit == "" { + err = errors.New("head.sha is null") + return + } + url := pull.HTMLURL + if url == "" { + err = errors.New("html_url is null") + return + } + headBranch := pull.Head.Ref + if headBranch == "" { + err = errors.New("head.ref is null") + return + } + baseBranch := pull.Base.Ref + if baseBranch == "" { + err = errors.New("base.ref is null") + return + } + + authorUsername := pull.Poster.UserName + if authorUsername == "" { + err = errors.New("user.login is null") + return + } + num := pull.Index + if num == 0 { + err = errors.New("number is null") + return + } + + baseRepo, err = e.ParseGiteaRepo(*pull.Base.Repository) + if err != nil { + return + } + headRepo, err = e.ParseGiteaRepo(*pull.Head.Repository) + if err != nil { + return + } + + pullState := models.ClosedPullState + if pull.State == "open" { + pullState = models.OpenPullState + } + + pullModel = models.PullRequest{ + Author: authorUsername, + HeadBranch: headBranch, + HeadCommit: commit, + URL: url, + Num: int(num), + State: pullState, + BaseRepo: baseRepo, + BaseBranch: baseBranch, + } + return +} diff --git a/server/events/event_parser_test.go b/server/events/event_parser_test.go index bd71a2a335..fffe30e3eb 100644 --- a/server/events/event_parser_test.go +++ b/server/events/event_parser_test.go @@ -21,13 +21,14 @@ import ( "strings" "testing" - "github.com/google/go-github/v58/github" + "github.com/google/go-github/v65/github" "github.com/mcdafydd/go-azuredevops/azuredevops" "github.com/mohae/deepcopy" "github.com/runatlantis/atlantis/server/events" "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" . "github.com/runatlantis/atlantis/server/events/vcs/testdata" + "github.com/runatlantis/atlantis/server/logging" . 
"github.com/runatlantis/atlantis/testing" gitlab "github.com/xanzy/go-gitlab" ) @@ -62,6 +63,7 @@ func TestParseGithubRepo(t *testing.T) { } func TestParseGithubIssueCommentEvent(t *testing.T) { + logger := logging.NewNoopLogger(t) comment := github.IssueCommentEvent{ Repo: &Repo, Issue: &github.Issue{ @@ -76,26 +78,26 @@ func TestParseGithubIssueCommentEvent(t *testing.T) { testComment := deepcopy.Copy(comment).(github.IssueCommentEvent) testComment.Comment = nil - _, _, _, err := parser.ParseGithubIssueCommentEvent(&testComment) + _, _, _, err := parser.ParseGithubIssueCommentEvent(logger, &testComment) ErrEquals(t, "comment.user.login is null", err) testComment = deepcopy.Copy(comment).(github.IssueCommentEvent) testComment.Comment.User = nil - _, _, _, err = parser.ParseGithubIssueCommentEvent(&testComment) + _, _, _, err = parser.ParseGithubIssueCommentEvent(logger, &testComment) ErrEquals(t, "comment.user.login is null", err) testComment = deepcopy.Copy(comment).(github.IssueCommentEvent) testComment.Comment.User.Login = nil - _, _, _, err = parser.ParseGithubIssueCommentEvent(&testComment) + _, _, _, err = parser.ParseGithubIssueCommentEvent(logger, &testComment) ErrEquals(t, "comment.user.login is null", err) testComment = deepcopy.Copy(comment).(github.IssueCommentEvent) testComment.Issue = nil - _, _, _, err = parser.ParseGithubIssueCommentEvent(&testComment) + _, _, _, err = parser.ParseGithubIssueCommentEvent(logger, &testComment) ErrEquals(t, "issue.number is null", err) // this should be successful - repo, user, pullNum, err := parser.ParseGithubIssueCommentEvent(&comment) + repo, user, pullNum, err := parser.ParseGithubIssueCommentEvent(logger, &comment) Ok(t, err) Equals(t, models.Repo{ Owner: *comment.Repo.Owner.Login, @@ -115,25 +117,26 @@ func TestParseGithubIssueCommentEvent(t *testing.T) { } func TestParseGithubPullEvent(t *testing.T) { - _, _, _, _, _, err := parser.ParseGithubPullEvent(&github.PullRequestEvent{}) + logger := logging.NewNoopLogger(t) + _, _, _, _, _, err := parser.ParseGithubPullEvent(logger, &github.PullRequestEvent{}) ErrEquals(t, "pull_request is null", err) testEvent := deepcopy.Copy(PullEvent).(github.PullRequestEvent) testEvent.PullRequest.HTMLURL = nil - _, _, _, _, _, err = parser.ParseGithubPullEvent(&testEvent) + _, _, _, _, _, err = parser.ParseGithubPullEvent(logger, &testEvent) ErrEquals(t, "html_url is null", err) testEvent = deepcopy.Copy(PullEvent).(github.PullRequestEvent) testEvent.Sender = nil - _, _, _, _, _, err = parser.ParseGithubPullEvent(&testEvent) + _, _, _, _, _, err = parser.ParseGithubPullEvent(logger, &testEvent) ErrEquals(t, "sender is null", err) testEvent = deepcopy.Copy(PullEvent).(github.PullRequestEvent) testEvent.Sender.Login = nil - _, _, _, _, _, err = parser.ParseGithubPullEvent(&testEvent) + _, _, _, _, _, err = parser.ParseGithubPullEvent(logger, &testEvent) ErrEquals(t, "sender.login is null", err) - actPull, evType, actBaseRepo, actHeadRepo, actUser, err := parser.ParseGithubPullEvent(&PullEvent) + actPull, evType, actBaseRepo, actHeadRepo, actUser, err := parser.ParseGithubPullEvent(logger, &PullEvent) Ok(t, err) expBaseRepo := models.Repo{ Owner: "owner", @@ -163,30 +166,32 @@ func TestParseGithubPullEvent(t *testing.T) { } func TestParseGithubPullEventFromDraft(t *testing.T) { + logger := logging.NewNoopLogger(t) // verify that close event treated as 'close' events by default closeEvent := deepcopy.Copy(PullEvent).(github.PullRequestEvent) closeEvent.Action = github.String("closed") 
closeEvent.PullRequest.Draft = github.Bool(true) - _, evType, _, _, _, err := parser.ParseGithubPullEvent(&closeEvent) + _, evType, _, _, _, err := parser.ParseGithubPullEvent(logger, &closeEvent) Ok(t, err) Equals(t, models.ClosedPullEvent, evType) // verify that draft PRs are treated as 'other' events by default testEvent := deepcopy.Copy(PullEvent).(github.PullRequestEvent) testEvent.PullRequest.Draft = github.Bool(true) - _, evType, _, _, _, err = parser.ParseGithubPullEvent(&testEvent) + _, evType, _, _, _, err = parser.ParseGithubPullEvent(logger, &testEvent) Ok(t, err) Equals(t, models.OtherPullEvent, evType) // verify that drafts are planned if requested parser.AllowDraftPRs = true defer func() { parser.AllowDraftPRs = false }() - _, evType, _, _, _, err = parser.ParseGithubPullEvent(&testEvent) + _, evType, _, _, _, err = parser.ParseGithubPullEvent(logger, &testEvent) Ok(t, err) Equals(t, models.OpenedPullEvent, evType) } func TestParseGithubPullEvent_EventType(t *testing.T) { + logger := logging.NewNoopLogger(t) cases := []struct { action string exp models.PullRequestEventType @@ -255,19 +260,19 @@ func TestParseGithubPullEvent_EventType(t *testing.T) { event := deepcopy.Copy(PullEvent).(github.PullRequestEvent) action := c.action event.Action = &action - _, actType, _, _, _, err := parser.ParseGithubPullEvent(&event) + _, actType, _, _, _, err := parser.ParseGithubPullEvent(logger, &event) Ok(t, err) Equals(t, c.exp, actType) // Test draft parsing when draft PRs disabled draftPR := true event.PullRequest.Draft = &draftPR - _, draftEvType, _, _, _, err := parser.ParseGithubPullEvent(&event) + _, draftEvType, _, _, _, err := parser.ParseGithubPullEvent(logger, &event) Ok(t, err) Equals(t, c.draftExp, draftEvType) // Test draft parsing when draft PRs are enabled. 
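		// With AllowDraftPRs turned on, the same draft event should now parse
		// to the regular event type c.exp rather than the downgraded c.draftExp
		// asserted above.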
draftParser := parser draftParser.AllowDraftPRs = true - _, draftEvType, _, _, _, err = draftParser.ParseGithubPullEvent(&event) + _, draftEvType, _, _, _, err = draftParser.ParseGithubPullEvent(logger, &event) Ok(t, err) Equals(t, c.exp, draftEvType) }) @@ -275,37 +280,38 @@ func TestParseGithubPullEvent_EventType(t *testing.T) { } func TestParseGithubPull(t *testing.T) { + logger := logging.NewNoopLogger(t) testPull := deepcopy.Copy(Pull).(github.PullRequest) testPull.Head.SHA = nil - _, _, _, err := parser.ParseGithubPull(&testPull) + _, _, _, err := parser.ParseGithubPull(logger, &testPull) ErrEquals(t, "head.sha is null", err) testPull = deepcopy.Copy(Pull).(github.PullRequest) testPull.HTMLURL = nil - _, _, _, err = parser.ParseGithubPull(&testPull) + _, _, _, err = parser.ParseGithubPull(logger, &testPull) ErrEquals(t, "html_url is null", err) testPull = deepcopy.Copy(Pull).(github.PullRequest) testPull.Head.Ref = nil - _, _, _, err = parser.ParseGithubPull(&testPull) + _, _, _, err = parser.ParseGithubPull(logger, &testPull) ErrEquals(t, "head.ref is null", err) testPull = deepcopy.Copy(Pull).(github.PullRequest) testPull.Base.Ref = nil - _, _, _, err = parser.ParseGithubPull(&testPull) + _, _, _, err = parser.ParseGithubPull(logger, &testPull) ErrEquals(t, "base.ref is null", err) testPull = deepcopy.Copy(Pull).(github.PullRequest) testPull.User.Login = nil - _, _, _, err = parser.ParseGithubPull(&testPull) + _, _, _, err = parser.ParseGithubPull(logger, &testPull) ErrEquals(t, "user.login is null", err) testPull = deepcopy.Copy(Pull).(github.PullRequest) testPull.Number = nil - _, _, _, err = parser.ParseGithubPull(&testPull) + _, _, _, err = parser.ParseGithubPull(logger, &testPull) ErrEquals(t, "number is null", err) - pullRes, actBaseRepo, actHeadRepo, err := parser.ParseGithubPull(&Pull) + pullRes, actBaseRepo, actHeadRepo, err := parser.ParseGithubPull(logger, &Pull) Ok(t, err) expBaseRepo := models.Repo{ Owner: "owner", @@ -744,14 +750,14 @@ func TestNewCommand_CleansDir(t *testing.T) { for _, c := range cases { t.Run(c.RepoRelDir, func(t *testing.T) { - cmd := events.NewCommentCommand(c.RepoRelDir, nil, command.Plan, "", false, false, "workspace", "", "", false) + cmd := events.NewCommentCommand(c.RepoRelDir, nil, command.Plan, "", false, false, "", "workspace", "", "", false) Equals(t, c.ExpDir, cmd.RepoRelDir) }) } } func TestNewCommand_EmptyDirWorkspaceProject(t *testing.T) { - cmd := events.NewCommentCommand("", nil, command.Plan, "", false, false, "", "", "", false) + cmd := events.NewCommentCommand("", nil, command.Plan, "", false, false, "", "", "", "", false) Equals(t, events.CommentCommand{ RepoRelDir: "", Flags: nil, @@ -763,7 +769,7 @@ func TestNewCommand_EmptyDirWorkspaceProject(t *testing.T) { } func TestNewCommand_AllFieldsSet(t *testing.T) { - cmd := events.NewCommentCommand("dir", []string{"a", "b"}, command.Plan, "", true, false, "workspace", "project", "policyset", false) + cmd := events.NewCommentCommand("dir", []string{"a", "b"}, command.Plan, "", true, false, "", "workspace", "project", "policyset", false) Equals(t, events.CommentCommand{ Workspace: "workspace", RepoRelDir: "dir", @@ -810,7 +816,7 @@ func TestCommentCommand_IsAutoplan(t *testing.T) { } func TestCommentCommand_String(t *testing.T) { - exp := `command="plan" verbose=true dir="mydir" workspace="myworkspace" project="myproject" policyset="", clear-policy-approval=false, flags="flag1,flag2"` + exp := `command="plan", verbose=true, dir="mydir", workspace="myworkspace", project="myproject", 
policyset="", auto-merge-disabled=false, auto-merge-method=, clear-policy-approval=false, flags="flag1,flag2"` Equals(t, exp, (events.CommentCommand{ RepoRelDir: "mydir", Flags: []string{"flag1", "flag2"}, diff --git a/server/events/external_team_allowlist_checker.go b/server/events/external_team_allowlist_checker.go new file mode 100644 index 0000000000..9f3fe419ef --- /dev/null +++ b/server/events/external_team_allowlist_checker.go @@ -0,0 +1,59 @@ +package events + +import ( + "fmt" + "strings" + + "github.com/runatlantis/atlantis/server/core/runtime" + + "github.com/runatlantis/atlantis/server/events/models" +) + +type ExternalTeamAllowlistChecker struct { + Command string + ExtraArgs []string + ExternalTeamAllowlistRunner runtime.ExternalTeamAllowlistRunner +} + +func (checker *ExternalTeamAllowlistChecker) HasRules() bool { + return true +} + +func (checker *ExternalTeamAllowlistChecker) IsCommandAllowedForTeam(ctx models.TeamAllowlistCheckerContext, team string, command string) bool { + cmd := checker.buildCommandString(ctx, []string{team}, command) + out, err := checker.ExternalTeamAllowlistRunner.Run(ctx, "sh", "-c", cmd) + if err != nil { + return false + } + + return checker.checkOutputResults(out) +} + +func (checker *ExternalTeamAllowlistChecker) IsCommandAllowedForAnyTeam(ctx models.TeamAllowlistCheckerContext, teams []string, command string) bool { + cmd := checker.buildCommandString(ctx, teams, command) + out, err := checker.ExternalTeamAllowlistRunner.Run(ctx, "sh", "-c", cmd) + if err != nil { + return false + } + + return checker.checkOutputResults(out) +} + +func (checker *ExternalTeamAllowlistChecker) buildCommandString(ctx models.TeamAllowlistCheckerContext, teams []string, command string) string { + // Build command string + // Format is "$external_cmd $external_args $command $repo $teams" + cmdArr := append([]string{checker.Command}, checker.ExtraArgs...) + orgTeams := make([]string, len(teams)) + for i, team := range teams { + orgTeams[i] = fmt.Sprintf("%s/%s", ctx.BaseRepo.Owner, team) + } + + teamStr := strings.Join(orgTeams, " ") + return strings.Join(append(cmdArr, command, ctx.BaseRepo.FullName, teamStr), " ") +} + +func (checker *ExternalTeamAllowlistChecker) checkOutputResults(output string) bool { + lines := strings.Split(strings.TrimSpace(output), "\n") + lastLine := lines[len(lines)-1] + return strings.EqualFold(lastLine, "pass") +} diff --git a/server/events/external_team_allowlist_checker_test.go b/server/events/external_team_allowlist_checker_test.go new file mode 100644 index 0000000000..f6ee136b39 --- /dev/null +++ b/server/events/external_team_allowlist_checker_test.go @@ -0,0 +1,77 @@ +package events_test + +import ( + "testing" + + "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" + + . "github.com/petergtz/pegomock/v4" + runtime_mocks "github.com/runatlantis/atlantis/server/core/runtime/mocks" + "github.com/runatlantis/atlantis/server/events" + . 
"github.com/runatlantis/atlantis/testing" +) + +var extTeamAllowlistChecker events.ExternalTeamAllowlistChecker +var extTeamAllowlistCheckerRunner *runtime_mocks.MockExternalTeamAllowlistRunner + +func externalTeamAllowlistCheckerSetup(t *testing.T) { + RegisterMockTestingT(t) + extTeamAllowlistCheckerRunner = runtime_mocks.NewMockExternalTeamAllowlistRunner() + + extTeamAllowlistChecker = events.ExternalTeamAllowlistChecker{ + ExternalTeamAllowlistRunner: extTeamAllowlistCheckerRunner, + } +} + +func TestIsCommandAllowedForTeam(t *testing.T) { + ctx := models.TeamAllowlistCheckerContext{ + Log: logging.NewNoopLogger(t), + } + + t.Run("allowed", func(t *testing.T) { + externalTeamAllowlistCheckerSetup(t) + + When(extTeamAllowlistCheckerRunner.Run(Any[models.TeamAllowlistCheckerContext](), Any[string](), Any[string](), + Any[string]())).ThenReturn("pass\n", nil) + + res := extTeamAllowlistChecker.IsCommandAllowedForTeam(ctx, "foo", "plan") + Equals(t, true, res) + }) + + t.Run("denied", func(t *testing.T) { + externalTeamAllowlistCheckerSetup(t) + + When(extTeamAllowlistCheckerRunner.Run(Any[models.TeamAllowlistCheckerContext](), Any[string](), Any[string](), + Any[string]())).ThenReturn("nothing found\n", nil) + + res := extTeamAllowlistChecker.IsCommandAllowedForTeam(ctx, "foo", "plan") + Equals(t, false, res) + }) +} + +func TestIsCommandAllowedForAnyTeam(t *testing.T) { + ctx := models.TeamAllowlistCheckerContext{ + Log: logging.NewNoopLogger(t), + } + + t.Run("allowed", func(t *testing.T) { + externalTeamAllowlistCheckerSetup(t) + + When(extTeamAllowlistCheckerRunner.Run(Any[models.TeamAllowlistCheckerContext](), Any[string](), Any[string](), + Any[string]())).ThenReturn("pass\n", nil) + + res := extTeamAllowlistChecker.IsCommandAllowedForAnyTeam(ctx, []string{"foo"}, "plan") + Equals(t, true, res) + }) + + t.Run("denied", func(t *testing.T) { + externalTeamAllowlistCheckerSetup(t) + + When(extTeamAllowlistCheckerRunner.Run(Any[models.TeamAllowlistCheckerContext](), Any[string](), Any[string](), + Any[string]())).ThenReturn("nothing found\n", nil) + + res := extTeamAllowlistChecker.IsCommandAllowedForAnyTeam(ctx, []string{"foo"}, "plan") + Equals(t, false, res) + }) +} diff --git a/server/events/github_app_working_dir.go b/server/events/github_app_working_dir.go index 85435f8590..a06599efe0 100644 --- a/server/events/github_app_working_dir.go +++ b/server/events/github_app_working_dir.go @@ -5,6 +5,7 @@ import ( "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/vcs" + "github.com/runatlantis/atlantis/server/logging" ) const redactedReplacement = "://:@" @@ -19,7 +20,7 @@ type GithubAppWorkingDir struct { } // Clone writes a fresh token for Github App authentication -func (g *GithubAppWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (g *GithubAppWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { baseRepo := &p.BaseRepo // Realistically, this is a super brittle way of supporting clones using gh app installation tokens @@ -35,5 +36,5 @@ func (g *GithubAppWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, headRepo.CloneURL = strings.Replace(headRepo.CloneURL, "://:@", replacement, 1) headRepo.SanitizedCloneURL = strings.Replace(baseRepo.SanitizedCloneURL, redactedReplacement, replacement, 1) - return g.WorkingDir.Clone(headRepo, p, workspace) + return g.WorkingDir.Clone(logger, 
headRepo, p, workspace) } diff --git a/server/events/github_app_working_dir_test.go b/server/events/github_app_working_dir_test.go index 28983da870..78e64d4e0b 100644 --- a/server/events/github_app_working_dir_test.go +++ b/server/events/github_app_working_dir_test.go @@ -29,7 +29,6 @@ func TestClone_GithubAppNoneExisting(t *testing.T) { DataDir: dataDir, CheckoutMerge: false, TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir), - Logger: logger, } defer disableSSLVerification()() @@ -46,7 +45,7 @@ func TestClone_GithubAppNoneExisting(t *testing.T) { GithubHostname: testServer, } - cloneDir, _, err := gwd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, _, err := gwd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", }, "default") @@ -58,6 +57,8 @@ func TestClone_GithubAppNoneExisting(t *testing.T) { } func TestClone_GithubAppSetsCorrectUrl(t *testing.T) { + logger := logging.NewNoopLogger(t) + RegisterMockTestingT(t) workingDir := eventMocks.NewMockWorkingDir() @@ -88,13 +89,12 @@ func TestClone_GithubAppSetsCorrectUrl(t *testing.T) { modifiedBaseRepo.SanitizedCloneURL = "https://github.com/runatlantis/atlantis.git" When(credentials.GetToken()).ThenReturn("token", nil) - When(workingDir.Clone(modifiedBaseRepo, models.PullRequest{BaseRepo: modifiedBaseRepo}, "default")).ThenReturn( - "", true, nil, - ) + When(workingDir.Clone(Any[logging.SimpleLogging](), Eq(modifiedBaseRepo), Eq(models.PullRequest{BaseRepo: modifiedBaseRepo}), + Eq("default"))).ThenReturn("", true, nil) - _, success, _ := ghAppWorkingDir.Clone(headRepo, models.PullRequest{BaseRepo: baseRepo}, "default") + _, success, _ := ghAppWorkingDir.Clone(logger, headRepo, models.PullRequest{BaseRepo: baseRepo}, "default") - workingDir.VerifyWasCalledOnce().Clone(modifiedBaseRepo, models.PullRequest{BaseRepo: modifiedBaseRepo}, "default") + workingDir.VerifyWasCalledOnce().Clone(logger, modifiedBaseRepo, models.PullRequest{BaseRepo: modifiedBaseRepo}, "default") Assert(t, success == true, "clone url mutation error") } diff --git a/server/events/import_command_runner.go b/server/events/import_command_runner.go index 51fbe34981..7f850ca409 100644 --- a/server/events/import_command_runner.go +++ b/server/events/import_command_runner.go @@ -36,7 +36,7 @@ func (v *ImportCommandRunner) Run(ctx *command.Context, cmd *CommentCommand) { // required the Atlantis status checks to pass, then we've now changed // the mergeability status of the pull request. // This sets the approved, mergeable, and sqlocked status in the context. - ctx.PullRequestStatus, err = v.pullReqStatusFetcher.FetchPullStatus(ctx.Pull) + ctx.PullRequestStatus, err = v.pullReqStatusFetcher.FetchPullStatus(ctx.Log, ctx.Pull) if err != nil { // On error we continue the request with mergeable assumed false. 
// We want to continue because not all import will need this status, diff --git a/server/events/import_command_runner_test.go b/server/events/import_command_runner_test.go index 140fb86685..694f7d79e8 100644 --- a/server/events/import_command_runner_test.go +++ b/server/events/import_command_runner_test.go @@ -64,7 +64,7 @@ func TestImportCommandRunner_Run(t *testing.T) { modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} ctx := &command.Context{ User: testdata.User, - Log: logging.NewNoopLogger(t), + Log: logger, Scope: scopeNull, Pull: modelPull, HeadRepo: testdata.GithubRepo, @@ -72,16 +72,18 @@ func TestImportCommandRunner_Run(t *testing.T) { } cmd := &events.CommentCommand{Name: command.Import} - When(pullReqStatusFetcher.FetchPullStatus(modelPull)).ThenReturn(tt.pullReqStatus, nil) + When(pullReqStatusFetcher.FetchPullStatus(logger, modelPull)).ThenReturn(tt.pullReqStatus, nil) When(projectCommandBuilder.BuildImportCommands(ctx, cmd)).ThenReturn(tt.projectCmds, nil) importCommandRunner.Run(ctx, cmd) Assert(t, ctx.PullRequestStatus.Mergeable == true, "PullRequestStatus must be set for import_requirements") if tt.expNoComment { - vcsClient.VerifyWasCalled(Never()).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()) + vcsClient.VerifyWasCalled(Never()).CreateComment( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) } else { - vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, modelPull.Num, tt.expComment, "import") + vcsClient.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull.Num), Eq(tt.expComment), Eq("import")) } }) } diff --git a/server/events/instrumented_pull_closed_executor.go b/server/events/instrumented_pull_closed_executor.go index 0751a0d21b..5b1bba01e6 100644 --- a/server/events/instrumented_pull_closed_executor.go +++ b/server/events/instrumented_pull_closed_executor.go @@ -1,8 +1,6 @@ package events import ( - "strconv" - "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/logging" "github.com/runatlantis/atlantis/server/metrics" @@ -31,24 +29,20 @@ func NewInstrumentedPullClosedExecutor( } } -func (e *InstrumentedPullClosedExecutor) CleanUpPull(repo models.Repo, pull models.PullRequest) error { - log := e.log.With( - "repository", repo.FullName, - "pull-num", strconv.Itoa(pull.Num), - ) +func (e *InstrumentedPullClosedExecutor) CleanUpPull(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) error { executionSuccess := e.scope.Counter(metrics.ExecutionSuccessMetric) executionError := e.scope.Counter(metrics.ExecutionErrorMetric) executionTime := e.scope.Timer(metrics.ExecutionTimeMetric).Start() defer executionTime.Stop() - log.Info("Initiating cleanup of pull data.") + logger.Info("Initiating cleanup of pull data.") - err := e.cleaner.CleanUpPull(repo, pull) + err := e.cleaner.CleanUpPull(logger, repo, pull) if err != nil { executionError.Inc(1) - log.Err("error during cleanup of pull data", err) + logger.Err("error during cleanup of pull data", err) return err } diff --git a/server/events/markdown_renderer.go b/server/events/markdown_renderer.go index 74a72c6719..5bbfc8a47e 100644 --- a/server/events/markdown_renderer.go +++ b/server/events/markdown_renderer.go @@ -72,6 +72,7 @@ type commonData struct { EnableDiffMarkdownFormat bool ExecutableName string HideUnchangedPlanComments bool + VcsRequestType 
string } // errData is data about an error response. @@ -170,13 +171,20 @@ func NewMarkdownRenderer( // Render formats the data into a markdown string. // nolint: interfacer -func (m *MarkdownRenderer) Render(res command.Result, cmdName command.Name, subCmd, log string, verbose bool, vcsHost models.VCSHostType) string { - commandStr := cases.Title(language.English).String(strings.Replace(cmdName.String(), "_", " ", -1)) +func (m *MarkdownRenderer) Render(ctx *command.Context, res command.Result, cmd PullCommand) string { + commandStr := cases.Title(language.English).String(strings.Replace(cmd.CommandName().String(), "_", " ", -1)) + var vcsRequestType string + if ctx.Pull.BaseRepo.VCSHost.Type == models.Gitlab { + vcsRequestType = "Merge Request" + } else { + vcsRequestType = "Pull Request" + } + common := commonData{ Command: commandStr, - SubCommand: subCmd, - Verbose: verbose, - Log: log, + SubCommand: cmd.SubCommandName(), + Verbose: cmd.IsVerbose(), + Log: ctx.Log.GetHistory(), PlansDeleted: res.PlansDeleted, DisableApplyAll: m.disableApplyAll || m.disableApply, DisableApply: m.disableApply, @@ -184,6 +192,7 @@ func (m *MarkdownRenderer) Render(res command.Result, cmdName command.Name, subC EnableDiffMarkdownFormat: m.enableDiffMarkdownFormat, ExecutableName: m.executableName, HideUnchangedPlanComments: m.hideUnchangedPlanComments, + VcsRequestType: vcsRequestType, } templates := m.markdownTemplates @@ -194,10 +203,12 @@ func (m *MarkdownRenderer) Render(res command.Result, cmdName command.Name, subC if res.Failure != "" { return m.renderTemplateTrimSpace(templates.Lookup("failureWithLog"), failureData{res.Failure, "", common}) } - return m.renderProjectResults(res.ProjectResults, common, vcsHost) + return m.renderProjectResults(ctx, res.ProjectResults, common) } -func (m *MarkdownRenderer) renderProjectResults(results []command.ProjectResult, common commonData, vcsHost models.VCSHostType) string { +func (m *MarkdownRenderer) renderProjectResults(ctx *command.Context, results []command.ProjectResult, common commonData) string { + vcsHost := ctx.Pull.BaseRepo.VCSHost.Type + var resultsTmplData []projectResultTmplData numPlanSuccesses := 0 numPolicyCheckSuccesses := 0 diff --git a/server/events/markdown_renderer_test.go b/server/events/markdown_renderer_test.go index eebd1a8b87..39810dab13 100644 --- a/server/events/markdown_renderer_test.go +++ b/server/events/markdown_renderer_test.go @@ -23,6 +23,7 @@ import ( "github.com/runatlantis/atlantis/server/events" "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" . 
"github.com/runatlantis/atlantis/testing" ) @@ -60,17 +61,36 @@ func TestRenderErr(t *testing.T) { } r := events.NewMarkdownRenderer(false, false, false, false, false, false, "", "atlantis", false) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } for _, c := range cases { res := command.Result{ Error: c.Error, } for _, verbose := range []bool{true, false} { t.Run(fmt.Sprintf("%s_%t", c.Description, verbose), func(t *testing.T) { - s := r.Render(res, c.Command, "", "log", verbose, models.Github) + cmd := &events.CommentCommand{ + Name: c.Command, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) if !verbose { Equals(t, normalize(c.Expected), normalize(s)) } else { - Equals(t, normalize(c.Expected)+"\n\n
<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>
", normalize(s)) + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected+ + fmt.Sprintf("\n
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log)), normalize(s)) } }) } @@ -88,34 +108,54 @@ func TestRenderFailure(t *testing.T) { "apply failure", command.Apply, "failure", - "**Apply Failed**: failure\n", + "**Apply Failed**: failure", }, { "plan failure", command.Plan, "failure", - "**Plan Failed**: failure\n", + "**Plan Failed**: failure", }, { "policy check failure", command.PolicyCheck, "failure", - "**Policy Check Failed**: failure\n", + "**Policy Check Failed**: failure", }, } r := events.NewMarkdownRenderer(false, false, false, false, false, false, "", "atlantis", false) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } + for _, c := range cases { res := command.Result{ Failure: c.Failure, } for _, verbose := range []bool{true, false} { t.Run(fmt.Sprintf("%s_%t", c.Description, verbose), func(t *testing.T) { - s := r.Render(res, c.Command, "", "log", verbose, models.Github) + cmd := &events.CommentCommand{ + Name: c.Command, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) if !verbose { Equals(t, normalize(c.Expected), normalize(s)) } else { - Equals(t, normalize(c.Expected+"\n
<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>
"), normalize(s)) + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected+ + fmt.Sprintf("\n
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log)), normalize(s)) } }) } @@ -124,11 +164,27 @@ func TestRenderFailure(t *testing.T) { func TestRenderErrAndFailure(t *testing.T) { r := events.NewMarkdownRenderer(false, false, false, false, false, false, "", "atlantis", false) + logger := logging.NewNoopLogger(t).WithHistory() + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } res := command.Result{ Error: errors.New("error"), Failure: "failure", } - s := r.Render(res, command.Plan, "", "", false, models.Github) + cmd := &events.CommentCommand{ + Name: command.Plan, + Verbose: false, + } + + s := r.Render(ctx, res, cmd) Equals(t, "**Plan Error**\n```\nerror\n```", normalize(s)) } @@ -147,7 +203,7 @@ func TestRenderProjectResults(t *testing.T) { "", []command.ProjectResult{}, models.Github, - "Ran Plan for 0 projects:\n\n\n", + "Ran Plan for 0 projects:\n\n", }, { "single successful plan", @@ -166,23 +222,32 @@ func TestRenderProjectResults(t *testing.T) { }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -203,25 +268,33 @@ $$$ }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ - + $$$shell + atlantis plan -d path -w workspace + $$$ :twisted_rightwards_arrows: Upstream was modified, a new merge was performed. 
--- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -242,23 +315,32 @@ $$$ }, }, models.Github, - `Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -296,7 +378,8 @@ $$$ }, }, models.Github, - `Ran Policy Check for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Policy Check for project: $projectname$ dir: $path$ workspace: $workspace$ #### Policy Set: $policy1$ $$$diff @@ -317,16 +400,24 @@ policy set: policy1: requires: 1 approval(s), have: 0. policy set: policy2: passed. $$$ * :heavy_check_mark: To **approve** this project, comment: - * $$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -357,7 +448,8 @@ $$$ }, }, models.Github, - `Ran Policy Check for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Policy Check for project: $projectname$ dir: $path$ workspace: $workspace$
<details><summary>
Show Output @@ -382,26 +474,34 @@ FAIL - - main - WARNING: Null Resource creation is prohibit $$$ +
</details>
+ #### Policy Approval Status: $$$ policy set: policy1: requires: 1 approval(s), have: 0. $$$ * :heavy_check_mark: To **approve** this project, comment: - * $$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * $atlantis plan -d path -w workspace$ - - + $$$shell + atlantis plan -d path -w workspace + $$$ $$$ policy set: policy1: 2 tests, 1 passed, 0 warnings, 1 failure, 0 exceptions $$$ --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -420,7 +520,8 @@ $$$ }, }, models.Github, - `Ran Import for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Import for project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff import-output @@ -429,7 +530,9 @@ $$$ :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ `, }, { @@ -448,7 +551,8 @@ $$$ }, }, models.Github, - `Ran State $rm$ for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran State $rm$ for project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff state-rm-output @@ -457,7 +561,9 @@ $$$ :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ `, }, { @@ -472,7 +578,8 @@ $$$ }, }, models.Github, - `Ran Apply for dir: $path$ workspace: $workspace$ + ` +Ran Apply for dir: $path$ workspace: $workspace$ $$$diff success @@ -492,7 +599,8 @@ $$$ }, }, models.Github, - `Ran Apply for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Apply for project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff success @@ -527,10 +635,12 @@ $$$ }, }, models.Github, - `Ran Plan for 2 projects: + ` +Ran Plan for 2 projects: 1. dir: $path$ workspace: $workspace$ 1. project: $projectname$ dir: $path2$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -538,10 +648,14 @@ terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 2. 
project: $projectname$ dir: $path2$ workspace: $workspace$ @@ -550,20 +664,28 @@ terraform-output2 $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path2 -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url2) + $$$shell + atlantis apply -d path2 -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url2) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path2 -w workspace$ + $$$shell + atlantis plan -d path2 -w workspace + $$$ --- ### Plan Summary 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -605,10 +727,12 @@ $$$ }, }, models.Github, - `Ran Policy Check for 2 projects: + ` +Ran Policy Check for 2 projects: 1. dir: $path$ workspace: $workspace$ 1. project: $projectname$ dir: $path2$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ #### Policy Set: $policy1$ @@ -618,10 +742,14 @@ $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 2. project: $projectname$ dir: $path2$ workspace: $workspace$ @@ -632,16 +760,24 @@ $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path2 -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url2) + $$$shell + atlantis apply -d path2 -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url2) * :repeat: To re-run policies **plan** this project again by commenting: - * $atlantis plan -d path2 -w workspace$ + $$$shell + atlantis plan -d path2 -w workspace + $$$ --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -662,10 +798,12 @@ $$$ }, }, models.Github, - `Ran Apply for 2 projects: + ` +Ran Apply for 2 projects: 1. project: $projectname$ dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ +--- ### 1. 
project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff @@ -696,7 +834,8 @@ $$$ }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ **Plan Error** $$$ @@ -716,7 +855,8 @@ $$$ }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ **Plan Failed**: failure `, @@ -749,11 +889,13 @@ $$$ }, }, models.Github, - `Ran Plan for 3 projects: + ` +Ran Plan for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ 1. project: $projectname$ dir: $path3$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -761,10 +903,14 @@ terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 2. dir: $path2$ workspace: $workspace$ @@ -782,10 +928,14 @@ $$$ 3 projects, 1 with changes, 0 with no changes, 2 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -833,11 +983,13 @@ $$$ }, }, models.Github, - `Ran Policy Check for 3 projects: + ` +Ran Policy Check for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ 1. project: $projectname$ dir: $path3$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ #### Policy Set: $policy1$ @@ -847,10 +999,14 @@ $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 2. dir: $path2$ workspace: $workspace$ @@ -866,10 +1022,14 @@ $$$ policy set: policy1: requires: 1 approval(s), have: 0. $$$ * :heavy_check_mark: To **approve** this project, comment: - * $$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 3. 
project: $projectname$ dir: $path3$ workspace: $workspace$ @@ -879,12 +1039,18 @@ error $$$ --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * $atlantis approve_policies$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis approve_policies + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ * :repeat: To re-run policies **plan** this project again by commenting: - * $atlantis plan$ + $$$shell + atlantis plan + $$$ `, }, { @@ -909,11 +1075,13 @@ $$$ }, }, models.Github, - `Ran Apply for 3 projects: + ` +Ran Apply for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ 1. dir: $path3$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -959,11 +1127,13 @@ $$$ }, }, models.Github, - `Ran Apply for 3 projects: + ` +Ran Apply for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ 1. dir: $path3$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -990,6 +1160,19 @@ $$$ } r := events.NewMarkdownRenderer(false, false, false, false, false, false, "", "atlantis", false) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } for _, c := range cases { t.Run(c.Description, func(t *testing.T) { res := command.Result{ @@ -997,11 +1180,18 @@ $$$ } for _, verbose := range []bool{true, false} { t.Run(c.Description, func(t *testing.T) { - s := r.Render(res, c.Command, c.SubCommand, "log", verbose, c.VCSHost) + cmd := &events.CommentCommand{ + Name: c.Command, + SubName: c.SubCommand, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) if !verbose { Equals(t, normalize(c.Expected), normalize(s)) } else { - Equals(t, normalize(c.Expected+"\n
<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>
"), normalize(s)) + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected+ + fmt.Sprintf("
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log)), normalize(s)) } }) } @@ -1034,17 +1224,22 @@ func TestRenderProjectResultsDisableApplyAll(t *testing.T) { }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ `, }, { @@ -1064,17 +1259,22 @@ $$$ }, }, models.Github, - `Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ `, }, { @@ -1104,10 +1304,12 @@ $$$ }, }, models.Github, - `Ran Plan for 2 projects: + ` +Ran Plan for 2 projects: 1. dir: $path$ workspace: $workspace$ 1. project: $projectname$ dir: $path2$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -1115,10 +1317,14 @@ terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 2. 
project: $projectname$ dir: $path2$ workspace: $workspace$ @@ -1127,10 +1333,14 @@ terraform-output2 $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path2 -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url2) + $$$shell + atlantis apply -d path2 -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url2) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path2 -w workspace$ + $$$shell + atlantis plan -d path2 -w workspace + $$$ --- ### Plan Summary @@ -1150,6 +1360,19 @@ $$$ "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } for _, c := range cases { t.Run(c.Description, func(t *testing.T) { res := command.Result{ @@ -1157,11 +1380,17 @@ $$$ } for _, verbose := range []bool{true, false} { t.Run(c.Description, func(t *testing.T) { - s := r.Render(res, c.Command, "", "log", verbose, c.VCSHost) + cmd := &events.CommentCommand{ + Name: c.Command, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) if !verbose { Equals(t, normalize(c.Expected), normalize(s)) } else { - Equals(t, normalize(c.Expected+"\n
<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>
"), normalize(s)) + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected)+ + fmt.Sprintf("\n
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log), normalize(s)) } }) } @@ -1169,7 +1398,7 @@ $$$ } } -// Test that if disable apply is set then the apply footer is not added +// Test that if disable apply is set then the apply footer is not added func TestRenderProjectResultsDisableApply(t *testing.T) { cases := []struct { Description string @@ -1194,15 +1423,18 @@ func TestRenderProjectResultsDisableApply(t *testing.T) { }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ `, }, { @@ -1222,15 +1454,18 @@ $$$ }, }, models.Github, - `Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ `, }, { @@ -1260,19 +1495,23 @@ $$$ }, }, models.Github, - `Ran Plan for 2 projects: + ` +Ran Plan for 2 projects: 1. dir: $path$ workspace: $workspace$ 1. project: $projectname$ dir: $path2$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 2. project: $projectname$ dir: $path2$ workspace: $workspace$ @@ -1280,9 +1519,11 @@ $$$diff terraform-output2 $$$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url2) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url2) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path2 -w workspace$ + $$$shell + atlantis plan -d path2 -w workspace + $$$ --- ### Plan Summary @@ -1303,6 +1544,19 @@ $$$ "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } for _, c := range cases { t.Run(c.Description, func(t *testing.T) { res := command.Result{ @@ -1310,11 +1564,17 @@ $$$ } for _, verbose := range []bool{true, false} { t.Run(c.Description, func(t *testing.T) { - s := r.Render(res, c.Command, "", "log", verbose, c.VCSHost) + cmd := &events.CommentCommand{ + Name: c.Command, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) if !verbose { Equals(t, normalize(c.Expected), normalize(s)) } else { - Equals(t, normalize(c.Expected+"\n
<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>
"), normalize(s)) + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected)+ + fmt.Sprintf("\n
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log), normalize(s)) } }) } @@ -1342,8 +1602,21 @@ func TestRenderCustomPolicyCheckTemplate_DisableApplyAll(t *testing.T) { "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } - rendered := r.Render(command.Result{ + res := command.Result{ ProjectResults: []command.ProjectResult{ { Workspace: "workspace", @@ -1361,8 +1634,14 @@ func TestRenderCustomPolicyCheckTemplate_DisableApplyAll(t *testing.T) { }, }, }, - }, command.PolicyCheck, "", "log", false, models.Github) - exp = `Ran Policy Check for dir: $path$ workspace: $workspace$ + } + cmd := &events.CommentCommand{ + Name: command.PolicyCheck, + Verbose: false, + } + rendered := r.Render(ctx, res, cmd) + exp = ` +Ran Policy Check for dir: $path$ workspace: $workspace$ #### Policy Set: $policy1$ $$$diff @@ -1371,10 +1650,15 @@ $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * $atlantis plan -d path -w workspace$` + $$$shell + atlantis plan -d path -w workspace + $$$ +` Equals(t, normalize(exp), normalize(rendered)) } @@ -1392,8 +1676,20 @@ func TestRenderProjectResults_DisableFolding(t *testing.T) { "atlantis", // executableName false, // hideUnchangedPlanComments ) - - rendered := mr.Render(command.Result{ + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } + res := command.Result{ ProjectResults: []command.ProjectResult{ { RepoRelDir: ".", @@ -1401,7 +1697,12 @@ func TestRenderProjectResults_DisableFolding(t *testing.T) { Error: errors.New(strings.Repeat("line\n", 13)), }, }, - }, command.Plan, "", "log", false, models.Github) + } + cmd := &events.CommentCommand{ + Name: command.Plan, + Verbose: false, + } + rendered := mr.Render(ctx, res, cmd) Equals(t, false, strings.Contains(rendered, "\n
")) } @@ -1484,8 +1785,20 @@ func TestRenderProjectResults_WrappedErr(t *testing.T) { "atlantis", // executableName false, // hideUnchangedPlanComments ) - - rendered := mr.Render(command.Result{ + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: c.VCSHost, + }, + }, + }, + } + res := command.Result{ ProjectResults: []command.ProjectResult{ { RepoRelDir: ".", @@ -1493,10 +1806,16 @@ func TestRenderProjectResults_WrappedErr(t *testing.T) { Error: errors.New(c.Output), }, }, - }, command.Plan, "", "log", false, c.VCSHost) + } + cmd := &events.CommentCommand{ + Name: command.Plan, + Verbose: false, + } + rendered := mr.Render(ctx, res, cmd) var exp string if c.ShouldWrap { - exp = `Ran Plan for dir: $.$ workspace: $default$ + exp = ` +Ran Plan for dir: $.$ workspace: $default$ **Plan Error**
Show Output @@ -1504,14 +1823,16 @@ func TestRenderProjectResults_WrappedErr(t *testing.T) { $$$ ` + c.Output + ` $$$ -
</details>` +</details>
+` } else { exp = `Ran Plan for dir: $.$ workspace: $default$ **Plan Error** $$$ ` + c.Output + ` -$$$` +$$$ +` } Equals(t, normalize(exp), normalize(rendered)) }) @@ -1523,69 +1844,80 @@ $$$` func TestRenderProjectResults_WrapSingleProject(t *testing.T) { cases := []struct { VCSHost models.VCSHostType + VcsRequestType string GitlabCommonMarkSupport bool Output string ShouldWrap bool }{ { - VCSHost: models.Github, - Output: strings.Repeat("line\n", 1), - ShouldWrap: false, + VCSHost: models.Github, + VcsRequestType: "Pull Request", + Output: strings.Repeat("line\n", 1), + ShouldWrap: false, }, { - VCSHost: models.Github, - Output: strings.Repeat("line\n", 13) + "No changes. Infrastructure is up-to-date.", - ShouldWrap: true, + VCSHost: models.Github, + VcsRequestType: "Pull Request", + Output: strings.Repeat("line\n", 13) + "No changes. Infrastructure is up-to-date.", + ShouldWrap: true, }, { VCSHost: models.Gitlab, + VcsRequestType: "Merge Request", GitlabCommonMarkSupport: false, Output: strings.Repeat("line\n", 1), ShouldWrap: false, }, { VCSHost: models.Gitlab, + VcsRequestType: "Merge Request", GitlabCommonMarkSupport: false, Output: strings.Repeat("line\n", 13), ShouldWrap: false, }, { VCSHost: models.Gitlab, + VcsRequestType: "Merge Request", GitlabCommonMarkSupport: true, Output: strings.Repeat("line\n", 1), ShouldWrap: false, }, { VCSHost: models.Gitlab, + VcsRequestType: "Merge Request", GitlabCommonMarkSupport: true, Output: strings.Repeat("line\n", 13) + "No changes. Infrastructure is up-to-date.", ShouldWrap: true, }, { - VCSHost: models.BitbucketCloud, - Output: strings.Repeat("line\n", 1), - ShouldWrap: false, + VCSHost: models.BitbucketCloud, + VcsRequestType: "Pull Request", + Output: strings.Repeat("line\n", 1), + ShouldWrap: false, }, { - VCSHost: models.BitbucketCloud, - Output: strings.Repeat("line\n", 13), - ShouldWrap: false, + VCSHost: models.BitbucketCloud, + VcsRequestType: "Pull Request", + Output: strings.Repeat("line\n", 13), + ShouldWrap: false, }, { - VCSHost: models.BitbucketServer, - Output: strings.Repeat("line\n", 1), - ShouldWrap: false, + VCSHost: models.BitbucketServer, + VcsRequestType: "Pull Request", + Output: strings.Repeat("line\n", 1), + ShouldWrap: false, }, { - VCSHost: models.BitbucketServer, - Output: strings.Repeat("line\n", 13), - ShouldWrap: false, + VCSHost: models.BitbucketServer, + VcsRequestType: "Pull Request", + Output: strings.Repeat("line\n", 13), + ShouldWrap: false, }, } for _, c := range cases { - for _, cmd := range []command.Name{command.Plan, command.Apply} { - t.Run(fmt.Sprintf("%s_%s_%v", c.VCSHost.String(), cmd.String(), c.ShouldWrap), + for _, cmdName := range []command.Name{command.Plan, command.Apply} { + t.Run(fmt.Sprintf("%s_%s_%v", c.VCSHost.String(), cmdName.String(), c.ShouldWrap), func(t *testing.T) { mr := events.NewMarkdownRenderer( c.GitlabCommonMarkSupport, // gitlabSupportsCommonMark @@ -1598,8 +1930,22 @@ func TestRenderProjectResults_WrapSingleProject(t *testing.T) { "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: c.VCSHost, + }, + }, + }, + } + var pr command.ProjectResult - switch cmd { + switch cmdName { case command.Plan: pr = command.ProjectResult{ RepoRelDir: ".", @@ -1618,58 +1964,84 @@ func TestRenderProjectResults_WrapSingleProject(t *testing.T) { 
ApplySuccess: c.Output, } } - rendered := mr.Render(command.Result{ + res := command.Result{ ProjectResults: []command.ProjectResult{pr}, - }, cmd, "", "log", false, c.VCSHost) + } + cmd := &events.CommentCommand{ + Name: cmdName, + Verbose: false, + } + rendered := mr.Render(ctx, res, cmd) // Check result. var exp string - switch cmd { + switch cmdName { case command.Plan: if c.ShouldWrap { - exp = `Ran Plan for dir: $.$ workspace: $default$ + exp = ` +Ran Plan for dir: $.$ workspace: $default$
<details><summary>
Show Output $$$diff ` + strings.TrimSpace(c.Output) + ` $$$ +
</details>
* :arrow_forward: To **apply** this plan, comment: - * $applycmd$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + applycmd + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $replancmd$ - + $$$shell + replancmd + $$$ No changes. Infrastructure is up-to-date. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$` +* :fast_forward: To **apply** all unapplied plans from this ` + c.VcsRequestType + `, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this ` + c.VcsRequestType + `, comment: + $$$shell + atlantis unlock + $$$ +` } else { - exp = `Ran Plan for dir: $.$ workspace: $default$ + exp = ` +Ran Plan for dir: $.$ workspace: $default$ $$$diff ` + strings.TrimSpace(c.Output) + ` $$$ * :arrow_forward: To **apply** this plan, comment: - * $applycmd$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + applycmd + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $replancmd$ + $$$shell + replancmd + $$$ --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$` +* :fast_forward: To **apply** all unapplied plans from this ` + c.VcsRequestType + `, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this ` + c.VcsRequestType + `, comment: + $$$shell + atlantis unlock + $$$ +` } case command.Apply: if c.ShouldWrap { - exp = `Ran Apply for dir: $.$ workspace: $default$ + exp = ` +Ran Apply for dir: $.$ workspace: $default$
Show Output @@ -1677,13 +2049,16 @@ $$$diff ` + strings.TrimSpace(c.Output) + ` $$$ -
</details>
` + +` } else { - exp = `Ran Apply for dir: $.$ workspace: $default$ + exp = ` +Ran Apply for dir: $.$ workspace: $default$ $$$diff ` + strings.TrimSpace(c.Output) + ` -$$$` +$$$ +` } } @@ -1705,8 +2080,21 @@ func TestRenderProjectResults_MultiProjectApplyWrapped(t *testing.T) { "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } tfOut := strings.Repeat("line\n", 13) - rendered := mr.Render(command.Result{ + res := command.Result{ ProjectResults: []command.ProjectResult{ { RepoRelDir: ".", @@ -1719,11 +2107,18 @@ func TestRenderProjectResults_MultiProjectApplyWrapped(t *testing.T) { ApplySuccess: tfOut, }, }, - }, command.Apply, "", "log", false, models.Github) - exp := `Ran Apply for 2 projects: + } + cmd := &events.CommentCommand{ + Name: command.Apply, + Verbose: false, + } + rendered := mr.Render(ctx, res, cmd) + exp := ` +Ran Apply for 2 projects: 1. dir: $.$ workspace: $staging$ 1. dir: $.$ workspace: $production$ +--- ### 1. dir: $.$ workspace: $staging$
<details><summary>
Show Output @@ -1764,8 +2159,21 @@ func TestRenderProjectResults_MultiProjectPlanWrapped(t *testing.T) { "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } tfOut := strings.Repeat("line\n", 13) + "Plan: 1 to add, 0 to change, 0 to destroy." - rendered := mr.Render(command.Result{ + res := command.Result{ ProjectResults: []command.ProjectResult{ { RepoRelDir: ".", @@ -1788,11 +2196,18 @@ func TestRenderProjectResults_MultiProjectPlanWrapped(t *testing.T) { }, }, }, - }, command.Plan, "", "log", false, models.Github) - exp := `Ran Plan for 2 projects: + } + cmd := &events.CommentCommand{ + Name: command.Plan, + Verbose: false, + } + rendered := mr.Render(ctx, res, cmd) + exp := ` +Ran Plan for 2 projects: 1. dir: $.$ workspace: $staging$ 1. dir: $.$ workspace: $production$ +--- ### 1. dir: $.$ workspace: $staging$
<details><summary>
Show Output @@ -1800,13 +2215,17 @@ func TestRenderProjectResults_MultiProjectPlanWrapped(t *testing.T) { $$$diff ` + tfOut + ` $$$ +
</details>
* :arrow_forward: To **apply** this plan, comment: - * $staging-apply-cmd$ -* :put_litter_in_its_place: To **delete** this plan click [here](staging-lock-url) + $$$shell + staging-apply-cmd + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](staging-lock-url) * :repeat: To **plan** this project again, comment: - * $staging-replan-cmd$ -
+ $$$shell + staging-replan-cmd + $$$ Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -1816,13 +2235,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. $$$diff ` + tfOut + ` $$$ + * :arrow_forward: To **apply** this plan, comment: - * $production-apply-cmd$ -* :put_litter_in_its_place: To **delete** this plan click [here](production-lock-url) + $$$shell + production-apply-cmd + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](production-lock-url) * :repeat: To **plan** this project again, comment: - * $production-replan-cmd$ - + $$$shell + production-replan-cmd + $$$ Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -1830,10 +2253,14 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ ` Equals(t, normalize(exp), normalize(rendered)) } @@ -1842,11 +2269,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. // all the plans as a result. func TestRenderProjectResults_PlansDeleted(t *testing.T) { cases := map[string]struct { - cr command.Result + res command.Result exp string }{ "one failure": { - cr: command.Result{ + res: command.Result{ ProjectResults: []command.ProjectResult{ { RepoRelDir: ".", @@ -1856,12 +2283,14 @@ func TestRenderProjectResults_PlansDeleted(t *testing.T) { }, PlansDeleted: true, }, - exp: `Ran Plan for dir: $.$ workspace: $staging$ + exp: ` +Ran Plan for dir: $.$ workspace: $staging$ -**Plan Failed**: failure`, +**Plan Failed**: failure +`, }, "two failures": { - cr: command.Result{ + res: command.Result{ ProjectResults: []command.ProjectResult{ { RepoRelDir: ".", @@ -1876,10 +2305,12 @@ func TestRenderProjectResults_PlansDeleted(t *testing.T) { }, PlansDeleted: true, }, - exp: `Ran Plan for 2 projects: + exp: ` +Ran Plan for 2 projects: 1. dir: $.$ workspace: $staging$ 1. dir: $.$ workspace: $production$ +--- ### 1. dir: $.$ workspace: $staging$ **Plan Failed**: failure @@ -1895,7 +2326,7 @@ func TestRenderProjectResults_PlansDeleted(t *testing.T) { `, }, "one failure, one success": { - cr: command.Result{ + res: command.Result{ ProjectResults: []command.ProjectResult{ { RepoRelDir: ".", @@ -1915,10 +2346,12 @@ func TestRenderProjectResults_PlansDeleted(t *testing.T) { }, PlansDeleted: true, }, - exp: `Ran Plan for 2 projects: + exp: ` +Ran Plan for 2 projects: 1. dir: $.$ workspace: $staging$ 1. dir: $.$ workspace: $production$ +--- ### 1. 
dir: $.$ workspace: $staging$ **Plan Failed**: failure @@ -1952,7 +2385,24 @@ This plan was not saved because one or more projects failed and automerge requir "atlantis", // executableName false, // hideUnchangedPlanComments ) - rendered := mr.Render(c.cr, command.Plan, "", "log", false, models.Github) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } + cmd := &events.CommentCommand{ + Name: command.Plan, + Verbose: false, + } + rendered := mr.Render(ctx, c.res, cmd) Equals(t, normalize(c.exp), normalize(rendered)) }) } @@ -1972,7 +2422,7 @@ func TestRenderProjectResultsWithRepoLockingDisabled(t *testing.T) { command.Plan, []command.ProjectResult{}, models.Github, - "Ran Plan for 0 projects:\n\n\n", + "Ran Plan for 0 projects:\n\n", }, { "single successful plan", @@ -1990,22 +2440,31 @@ func TestRenderProjectResultsWithRepoLockingDisabled(t *testing.T) { }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ + $$$shell + atlantis apply -d path -w workspace + $$$ * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -2025,24 +2484,32 @@ $$$ }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ + $$$shell + atlantis apply -d path -w workspace + $$$ * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ - + $$$shell + atlantis plan -d path -w workspace + $$$ :twisted_rightwards_arrows: Upstream was modified, a new merge was performed. 
--- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -2062,22 +2529,31 @@ $$$ }, }, models.Github, - `Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ + $$$shell + atlantis apply -d path -w workspace + $$$ * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -2091,7 +2567,8 @@ $$$ }, }, models.Github, - `Ran Apply for dir: $path$ workspace: $workspace$ + ` +Ran Apply for dir: $path$ workspace: $workspace$ $$$diff success @@ -2110,7 +2587,8 @@ $$$ }, }, models.Github, - `Ran Apply for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Apply for project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff success @@ -2144,10 +2622,12 @@ $$$ }, }, models.Github, - `Ran Plan for 2 projects: + ` +Ran Plan for 2 projects: 1. dir: $path$ workspace: $workspace$ 1. project: $projectname$ dir: $path2$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -2155,9 +2635,13 @@ terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ + $$$shell + atlantis apply -d path -w workspace + $$$ * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 2. project: $projectname$ dir: $path2$ workspace: $workspace$ @@ -2166,19 +2650,27 @@ terraform-output2 $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path2 -w workspace$ + $$$shell + atlantis apply -d path2 -w workspace + $$$ * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path2 -w workspace$ + $$$shell + atlantis plan -d path2 -w workspace + $$$ --- ### Plan Summary 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -2198,10 +2690,12 @@ $$$ }, }, models.Github, - `Ran Apply for 2 projects: + ` +Ran Apply for 2 projects: 1. 
project: $projectname$ dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ +--- ### 1. project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff @@ -2231,7 +2725,8 @@ $$$ }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ **Plan Error** $$$ @@ -2250,7 +2745,8 @@ $$$ }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ **Plan Failed**: failure `, @@ -2282,11 +2778,13 @@ $$$ }, }, models.Github, - `Ran Plan for 3 projects: + ` +Ran Plan for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ 1. project: $projectname$ dir: $path3$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -2294,9 +2792,13 @@ terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ + $$$shell + atlantis apply -d path -w workspace + $$$ * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 2. dir: $path2$ workspace: $workspace$ @@ -2314,10 +2816,14 @@ $$$ 3 projects, 1 with changes, 0 with no changes, 2 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -2341,11 +2847,13 @@ $$$ }, }, models.Github, - `Ran Apply for 3 projects: + ` +Ran Apply for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ 1. dir: $path3$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -2390,11 +2898,13 @@ $$$ }, }, models.Github, - `Ran Apply for 3 projects: + ` +Ran Apply for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ 1. dir: $path3$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -2431,6 +2941,19 @@ $$$ "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } for _, c := range cases { t.Run(c.Description, func(t *testing.T) { res := command.Result{ @@ -2438,11 +2961,17 @@ $$$ } for _, verbose := range []bool{true, false} { t.Run(c.Description, func(t *testing.T) { - s := r.Render(res, c.Command, "", "log", verbose, c.VCSHost) + cmd := &events.CommentCommand{ + Name: c.Command, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) if !verbose { Equals(t, normalize(c.Expected), normalize(s)) } else { - Equals(t, normalize(c.Expected+"\n
<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>
"), normalize(s)) + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected+ + fmt.Sprintf("
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log)), normalize(s)) } }) } @@ -2450,7 +2979,145 @@ $$$ } } -const tfOutput = `An execution plan has been generated and is shown below. +func TestRenderProjectResultsWithGitLab(t *testing.T) { + cases := []struct { + Description string + Command command.Name + ProjectResults []command.ProjectResult + VCSHost models.VCSHostType + Expected string + }{ + { + "multiple successful plans", + command.Plan, + []command.ProjectResult{ + { + Workspace: "workspace", + RepoRelDir: "path", + PlanSuccess: &models.PlanSuccess{ + TerraformOutput: "terraform-output", + LockURL: "lock-url", + ApplyCmd: "atlantis apply -d path -w workspace", + RePlanCmd: "atlantis plan -d path -w workspace", + }, + }, + { + Workspace: "workspace", + RepoRelDir: "path2", + ProjectName: "projectname", + PlanSuccess: &models.PlanSuccess{ + TerraformOutput: "terraform-output2", + LockURL: "lock-url2", + ApplyCmd: "atlantis apply -d path2 -w workspace", + RePlanCmd: "atlantis plan -d path2 -w workspace", + }, + }, + }, + models.Gitlab, + ` +Ran Plan for 2 projects: + +1. dir: $path$ workspace: $workspace$ +1. project: $projectname$ dir: $path2$ workspace: $workspace$ +--- + +### 1. dir: $path$ workspace: $workspace$ +$$$diff +terraform-output +$$$ + +* :arrow_forward: To **apply** this plan, comment: + $$$shell + atlantis apply -d path -w workspace + $$$ +* :repeat: To **plan** this project again, comment: + $$$shell + atlantis plan -d path -w workspace + $$$ + +--- +### 2. project: $projectname$ dir: $path2$ workspace: $workspace$ +$$$diff +terraform-output2 +$$$ + +* :arrow_forward: To **apply** this plan, comment: + $$$shell + atlantis apply -d path2 -w workspace + $$$ +* :repeat: To **plan** this project again, comment: + $$$shell + atlantis plan -d path2 -w workspace + $$$ + +--- +### Plan Summary + +2 projects, 2 with changes, 0 with no changes, 0 failed + +* :fast_forward: To **apply** all unapplied plans from this Merge Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Merge Request, comment: + $$$shell + atlantis unlock + $$$ +`, + }, + } + + r := events.NewMarkdownRenderer( + false, // gitlabSupportsCommonMark + false, // disableApplyAll + false, // disableApply + false, // disableMarkdownFolding + true, // disableRepoLocking + false, // enableDiffMarkdownFormat + "", // MarkdownTemplateOverridesDir + "atlantis", // executableName + false, // hideUnchangedPlanComments + ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + for _, c := range cases { + t.Run(c.Description, func(t *testing.T) { + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: c.VCSHost, + }, + }, + }, + } + res := command.Result{ + ProjectResults: c.ProjectResults, + } + for _, verbose := range []bool{true, false} { + t.Run(c.Description, func(t *testing.T) { + cmd := &events.CommentCommand{ + Name: c.Command, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) + if !verbose { + Equals(t, normalize(c.Expected), normalize(s)) + } else { + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected)+ + fmt.Sprintf("\n
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log), normalize(s)) + } + }) + } + }) + } +} + +const tfOutput = ` +An execution plan has been generated and is shown below. Resource actions are indicated with the following symbols: ~ update in-place -/+ destroy and then create replacement @@ -2657,7 +3324,8 @@ var cases = []struct { }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$
<details><summary>Show Output</summary> @@ -2845,11 +3513,13 @@ Terraform will perform the following actions: Plan: 1 to add, 2 to change, 1 to destroy. $$$ +</details>
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ - + $$$shell + atlantis plan -d path -w workspace + $$$ Plan: 1 to add, 2 to change, 1 to destroy. `, }, @@ -2867,19 +3537,38 @@ func TestRenderProjectResultsWithEnableDiffMarkdownFormat(t *testing.T) { "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) for _, c := range cases { t.Run(c.Description, func(t *testing.T) { + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } res := command.Result{ ProjectResults: c.ProjectResults, } for _, verbose := range []bool{true, false} { t.Run(c.Description, func(t *testing.T) { - s := r.Render(res, c.Command, "", "log", verbose, c.VCSHost) + cmd := &events.CommentCommand{ + Name: c.Command, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) if !verbose { Equals(t, normalize(c.Expected), normalize(s)) } else { - Equals(t, normalize(c.Expected+"\n
<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>
"), normalize(s)) + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected)+ + fmt.Sprintf("\n
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log), normalize(s)) } }) } @@ -2903,17 +3592,34 @@ func BenchmarkRenderProjectResultsWithEnableDiffMarkdownFormat(b *testing.B) { "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(b).WithHistory() + logText := "log" + logger.Info(logText) for _, c := range cases { b.Run(c.Description, func(b *testing.B) { + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: c.VCSHost, + }, + }, + }, + } res := command.Result{ ProjectResults: c.ProjectResults, } for _, verbose := range []bool{true, false} { b.Run(fmt.Sprintf("verbose %t", verbose), func(b *testing.B) { + cmd := &events.CommentCommand{ + Name: c.Command, + Verbose: verbose, + } b.ReportAllocs() for i := 0; i < b.N; i++ { - render = r.Render(res, c.Command, "", "log", verbose, c.VCSHost) + render = r.Render(ctx, res, cmd) } Render = render }) @@ -2970,11 +3676,13 @@ func TestRenderProjectResultsHideUnchangedPlans(t *testing.T) { }, }, models.Github, - `Ran Plan for 3 projects: + ` +Ran Plan for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. project: $projectname$ dir: $path2$ workspace: $workspace$ 1. project: $projectname2$ dir: $path3$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -2982,10 +3690,14 @@ terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 3. project: $projectname2$ dir: $path3$ workspace: $workspace$ @@ -2994,20 +3706,28 @@ terraform-output3 $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path3 -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url3) + $$$shell + atlantis apply -d path3 -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url3) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path3 -w workspace$ + $$$shell + atlantis plan -d path3 -w workspace + $$$ --- ### Plan Summary 3 projects, 2 with changes, 1 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -3049,37 +3769,64 @@ $$$ }, }, models.Github, - `Ran Plan for 3 projects: + ` +Ran Plan for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. project: $projectname$ dir: $path2$ workspace: $workspace$ 1. 
project: $projectname2$ dir: $path3$ workspace: $workspace$ +--- ### Plan Summary 3 projects, 0 with changes, 3 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, } r := events.NewMarkdownRenderer(false, false, false, false, false, false, "", "atlantis", true) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + for _, c := range cases { t.Run(c.Description, func(t *testing.T) { + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: c.VCSHost, + }, + }, + }, + } res := command.Result{ ProjectResults: c.ProjectResults, } for _, verbose := range []bool{true, false} { t.Run(c.Description, func(t *testing.T) { - s := r.Render(res, c.Command, c.SubCommand, "log", verbose, c.VCSHost) + cmd := &events.CommentCommand{ + Name: c.Command, + SubName: c.SubCommand, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) if !verbose { Equals(t, normalize(c.Expected), normalize(s)) } else { - Equals(t, normalize(c.Expected+"\n
<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>
"), normalize(s)) + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected)+ + fmt.Sprintf("\n
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log), normalize(s)) } }) } diff --git a/server/events/mock_workingdir_test.go b/server/events/mock_workingdir_test.go index 30b344ea3a..d298b2cee7 100644 --- a/server/events/mock_workingdir_test.go +++ b/server/events/mock_workingdir_test.go @@ -4,10 +4,12 @@ package events import ( - pegomock "github.com/petergtz/pegomock/v4" - models "github.com/runatlantis/atlantis/server/events/models" "reflect" "time" + + pegomock "github.com/petergtz/pegomock/v4" + models "github.com/runatlantis/atlantis/server/events/models" + logging "github.com/runatlantis/atlantis/server/logging" ) type MockWorkingDir struct { @@ -25,11 +27,11 @@ func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir { func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (mock *MockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{headRepo, p, workspace} + params := []pegomock.Param{logger, headRepo, p, workspace} result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string var ret1 bool @@ -48,11 +50,11 @@ func (mock *MockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, wo return ret0, ret1, ret2 } -func (mock *MockWorkingDir) Delete(r models.Repo, p models.PullRequest) error { +func (mock *MockWorkingDir) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{r, p} + params := []pegomock.Param{logger, r, p} result := pegomock.GetGenericMockFrom(mock).Invoke("Delete", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) var ret0 error if len(result) != 0 { @@ -63,11 +65,11 @@ func (mock *MockWorkingDir) Delete(r models.Repo, p models.PullRequest) error { return ret0 } -func (mock *MockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error { +func (mock *MockWorkingDir) DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) error { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{r, p, workspace} + params := []pegomock.Param{logger, r, p, workspace} result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteForWorkspace", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) var ret0 error if len(result) != 0 { @@ -78,11 +80,11 @@ func (mock *MockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullReque return ret0 } -func (mock *MockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error { +func (mock *MockWorkingDir) DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{r, p, workspace, path, projectName} + params := []pegomock.Param{logger, r, p, workspace, path, projectName} result := pegomock.GetGenericMockFrom(mock).Invoke("DeletePlan", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) var ret0 error if len(result) != 0 { @@ -93,11 +95,11 @@ func (mock *MockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, work return ret0 } -func (mock *MockWorkingDir) GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) ([]string, error) { +func (mock *MockWorkingDir) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) ([]string, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{r, p, workspace} + params := []pegomock.Param{logger, r, p, workspace} result := pegomock.GetGenericMockFrom(mock).Invoke("GetGitUntrackedFiles", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 []string var ret1 error @@ -150,11 +152,11 @@ func (mock *MockWorkingDir) GetWorkingDir(r models.Repo, p models.PullRequest, w return ret0, ret1 } -func (mock *MockWorkingDir) HasDiverged(cloneDir string) bool { +func (mock *MockWorkingDir) HasDiverged(logger logging.SimpleLogging, cloneDir string) bool { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{cloneDir} + params := []pegomock.Param{logger, cloneDir} result := pegomock.GetGenericMockFrom(mock).Invoke("HasDiverged", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()}) var ret0 bool if len(result) != 0 { @@ -210,8 +212,8 @@ type VerifierMockWorkingDir struct { timeout time.Duration } -func (verifier *VerifierMockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_Clone_OngoingVerification { - params := []pegomock.Param{headRepo, p, workspace} +func (verifier *VerifierMockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_Clone_OngoingVerification { + params := []pegomock.Param{logger, headRepo, p, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clone", params, verifier.timeout) return &MockWorkingDir_Clone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -221,32 +223,36 @@ type MockWorkingDir_Clone_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) { - headRepo, p, workspace := c.GetAllCapturedArguments() - return headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1] +func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { + logger, headRepo, p, workspace := c.GetAllCapturedArguments() + return logger[len(logger)-1], headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) { +func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 
[]models.PullRequest, _param3 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) } } return } -func (verifier *VerifierMockWorkingDir) Delete(r models.Repo, p models.PullRequest) *MockWorkingDir_Delete_OngoingVerification { - params := []pegomock.Param{r, p} +func (verifier *VerifierMockWorkingDir) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) *MockWorkingDir_Delete_OngoingVerification { + params := []pegomock.Param{logger, r, p} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Delete", params, verifier.timeout) return &MockWorkingDir_Delete_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -256,28 +262,32 @@ type MockWorkingDir_Delete_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_Delete_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - r, p := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1] +func (c *MockWorkingDir_Delete_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest) { + logger, r, p := c.GetAllCapturedArguments() + return logger[len(logger)-1], r[len(r)-1], p[len(p)-1] } -func (c *MockWorkingDir_Delete_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { +func (c *MockWorkingDir_Delete_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) + } + _param2 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.PullRequest) } } return } -func (verifier *VerifierMockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_DeleteForWorkspace_OngoingVerification { - params := []pegomock.Param{r, p, workspace} +func (verifier *VerifierMockWorkingDir) DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p 
models.PullRequest, workspace string) *MockWorkingDir_DeleteForWorkspace_OngoingVerification { + params := []pegomock.Param{logger, r, p, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteForWorkspace", params, verifier.timeout) return &MockWorkingDir_DeleteForWorkspace_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -287,32 +297,36 @@ type MockWorkingDir_DeleteForWorkspace_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) { - r, p, workspace := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] +func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { + logger, r, p, workspace := c.GetAllCapturedArguments() + return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) { +func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) } } return } -func (verifier *VerifierMockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) *MockWorkingDir_DeletePlan_OngoingVerification { - params := []pegomock.Param{r, p, workspace, path, projectName} +func (verifier *VerifierMockWorkingDir) DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) *MockWorkingDir_DeletePlan_OngoingVerification { + params := []pegomock.Param{logger, r, p, workspace, path, projectName} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeletePlan", params, verifier.timeout) return &MockWorkingDir_DeletePlan_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -322,25 +336,25 @@ type MockWorkingDir_DeletePlan_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string, string, string) { - r, p, 
workspace, path, projectName := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1], path[len(path)-1], projectName[len(projectName)-1] +func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string, string, string) { + logger, r, p, workspace, path, projectName := c.GetAllCapturedArguments() + return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1], path[len(path)-1], projectName[len(projectName)-1] } -func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string, _param3 []string, _param4 []string) { +func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string, _param4 []string, _param5 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) } _param3 = make([]string, len(c.methodInvocations)) for u, param := range params[3] { @@ -350,12 +364,16 @@ func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments( for u, param := range params[4] { _param4[u] = param.(string) } + _param5 = make([]string, len(c.methodInvocations)) + for u, param := range params[5] { + _param5[u] = param.(string) + } } return } -func (verifier *VerifierMockWorkingDir) GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification { - params := []pegomock.Param{r, p, workspace} +func (verifier *VerifierMockWorkingDir) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification { + params := []pegomock.Param{logger, r, p, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetGitUntrackedFiles", params, verifier.timeout) return &MockWorkingDir_GetGitUntrackedFiles_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -365,25 +383,29 @@ type MockWorkingDir_GetGitUntrackedFiles_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) { - r, p, workspace := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] +func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { + logger, r, p, workspace := 
c.GetAllCapturedArguments() + return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) { +func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) } } return @@ -455,8 +477,8 @@ func (c *MockWorkingDir_GetWorkingDir_OngoingVerification) GetAllCapturedArgumen return } -func (verifier *VerifierMockWorkingDir) HasDiverged(cloneDir string) *MockWorkingDir_HasDiverged_OngoingVerification { - params := []pegomock.Param{cloneDir} +func (verifier *VerifierMockWorkingDir) HasDiverged(logger logging.SimpleLogging, cloneDir string) *MockWorkingDir_HasDiverged_OngoingVerification { + params := []pegomock.Param{logger, cloneDir} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "HasDiverged", params, verifier.timeout) return &MockWorkingDir_HasDiverged_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -466,17 +488,21 @@ type MockWorkingDir_HasDiverged_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetCapturedArguments() string { - cloneDir := c.GetAllCapturedArguments() - return cloneDir[len(cloneDir)-1] +func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string) { + logger, cloneDir := c.GetAllCapturedArguments() + return logger[len(logger)-1], cloneDir[len(cloneDir)-1] } -func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { +func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(string) + _param0[u] = param.(logging.SimpleLogging) + } + _param1 = make([]string, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(string) } } return diff --git a/server/events/mocks/mock_azuredevops_pull_getter.go b/server/events/mocks/mock_azuredevops_pull_getter.go index 
ce3a618b88..95c57e0e89 100644 --- a/server/events/mocks/mock_azuredevops_pull_getter.go +++ b/server/events/mocks/mock_azuredevops_pull_getter.go @@ -7,6 +7,7 @@ import ( azuredevops "github.com/mcdafydd/go-azuredevops/azuredevops" pegomock "github.com/petergtz/pegomock/v4" models "github.com/runatlantis/atlantis/server/events/models" + logging "github.com/runatlantis/atlantis/server/logging" "reflect" "time" ) @@ -26,11 +27,11 @@ func NewMockAzureDevopsPullGetter(options ...pegomock.Option) *MockAzureDevopsPu func (mock *MockAzureDevopsPullGetter) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockAzureDevopsPullGetter) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockAzureDevopsPullGetter) GetPullRequest(repo models.Repo, pullNum int) (*azuredevops.GitPullRequest, error) { +func (mock *MockAzureDevopsPullGetter) GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) (*azuredevops.GitPullRequest, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockAzureDevopsPullGetter().") } - params := []pegomock.Param{repo, pullNum} + params := []pegomock.Param{logger, repo, pullNum} result := pegomock.GetGenericMockFrom(mock).Invoke("GetPullRequest", params, []reflect.Type{reflect.TypeOf((**azuredevops.GitPullRequest)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 *azuredevops.GitPullRequest var ret1 error @@ -82,8 +83,8 @@ type VerifierMockAzureDevopsPullGetter struct { timeout time.Duration } -func (verifier *VerifierMockAzureDevopsPullGetter) GetPullRequest(repo models.Repo, pullNum int) *MockAzureDevopsPullGetter_GetPullRequest_OngoingVerification { - params := []pegomock.Param{repo, pullNum} +func (verifier *VerifierMockAzureDevopsPullGetter) GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) *MockAzureDevopsPullGetter_GetPullRequest_OngoingVerification { + params := []pegomock.Param{logger, repo, pullNum} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetPullRequest", params, verifier.timeout) return &MockAzureDevopsPullGetter_GetPullRequest_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -93,21 +94,25 @@ type MockAzureDevopsPullGetter_GetPullRequest_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockAzureDevopsPullGetter_GetPullRequest_OngoingVerification) GetCapturedArguments() (models.Repo, int) { - repo, pullNum := c.GetAllCapturedArguments() - return repo[len(repo)-1], pullNum[len(pullNum)-1] +func (c *MockAzureDevopsPullGetter_GetPullRequest_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, int) { + logger, repo, pullNum := c.GetAllCapturedArguments() + return logger[len(logger)-1], repo[len(repo)-1], pullNum[len(pullNum)-1] } -func (c *MockAzureDevopsPullGetter_GetPullRequest_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int) { +func (c *MockAzureDevopsPullGetter_GetPullRequest_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []int) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) 
} - _param1 = make([]int, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(int) + _param1[u] = param.(models.Repo) + } + _param2 = make([]int, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(int) } } return diff --git a/server/events/mocks/mock_command_requirement_handler.go b/server/events/mocks/mock_command_requirement_handler.go index d302bf4525..d5a36f20eb 100644 --- a/server/events/mocks/mock_command_requirement_handler.go +++ b/server/events/mocks/mock_command_requirement_handler.go @@ -44,12 +44,12 @@ func (mock *MockCommandRequirementHandler) ValidateApplyProject(repoDir string, return ret0, ret1 } -func (mock *MockCommandRequirementHandler) ValidateProjectDependencies(_param0 command.ProjectContext) (string, error) { +func (mock *MockCommandRequirementHandler) ValidateImportProject(repoDir string, ctx command.ProjectContext) (string, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockCommandRequirementHandler().") } - params := []pegomock.Param{_param0} - result := pegomock.GetGenericMockFrom(mock).Invoke("ValidateProjectDependencies", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + params := []pegomock.Param{repoDir, ctx} + result := pegomock.GetGenericMockFrom(mock).Invoke("ValidateImportProject", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string var ret1 error if len(result) != 0 { @@ -63,12 +63,12 @@ func (mock *MockCommandRequirementHandler) ValidateProjectDependencies(_param0 c return ret0, ret1 } -func (mock *MockCommandRequirementHandler) ValidateImportProject(repoDir string, ctx command.ProjectContext) (string, error) { +func (mock *MockCommandRequirementHandler) ValidatePlanProject(repoDir string, ctx command.ProjectContext) (string, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockCommandRequirementHandler().") } params := []pegomock.Param{repoDir, ctx} - result := pegomock.GetGenericMockFrom(mock).Invoke("ValidateImportProject", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + result := pegomock.GetGenericMockFrom(mock).Invoke("ValidatePlanProject", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string var ret1 error if len(result) != 0 { @@ -82,12 +82,12 @@ func (mock *MockCommandRequirementHandler) ValidateImportProject(repoDir string, return ret0, ret1 } -func (mock *MockCommandRequirementHandler) ValidatePlanProject(repoDir string, ctx command.ProjectContext) (string, error) { +func (mock *MockCommandRequirementHandler) ValidateProjectDependencies(ctx command.ProjectContext) (string, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockCommandRequirementHandler().") } - params := []pegomock.Param{repoDir, ctx} - result := pegomock.GetGenericMockFrom(mock).Invoke("ValidatePlanProject", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + params := []pegomock.Param{ctx} + result := pegomock.GetGenericMockFrom(mock).Invoke("ValidateProjectDependencies", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string var ret1 error if len(result) != 0 { @@ -230,3 +230,30 @@ func (c *MockCommandRequirementHandler_ValidatePlanProject_OngoingVerification) } return } + +func (verifier *VerifierMockCommandRequirementHandler) ValidateProjectDependencies(ctx command.ProjectContext) *MockCommandRequirementHandler_ValidateProjectDependencies_OngoingVerification { + params := []pegomock.Param{ctx} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ValidateProjectDependencies", params, verifier.timeout) + return &MockCommandRequirementHandler_ValidateProjectDependencies_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockCommandRequirementHandler_ValidateProjectDependencies_OngoingVerification struct { + mock *MockCommandRequirementHandler + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockCommandRequirementHandler_ValidateProjectDependencies_OngoingVerification) GetCapturedArguments() command.ProjectContext { + ctx := c.GetAllCapturedArguments() + return ctx[len(ctx)-1] +} + +func (c *MockCommandRequirementHandler_ValidateProjectDependencies_OngoingVerification) GetAllCapturedArguments() (_param0 []command.ProjectContext) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]command.ProjectContext, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(command.ProjectContext) + } + } + return +} diff --git a/server/events/mocks/mock_comment_building.go b/server/events/mocks/mock_comment_building.go index 1e461a07ee..1d25d4eacd 100644 --- a/server/events/mocks/mock_comment_building.go +++ b/server/events/mocks/mock_comment_building.go @@ -24,49 +24,49 @@ func NewMockCommentBuilder(options ...pegomock.Option) *MockCommentBuilder { func (mock *MockCommentBuilder) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockCommentBuilder) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockCommentBuilder) BuildApplyComment(repoRelDir string, workspace string, project string, autoMergeDisabled bool) string { +func (mock *MockCommentBuilder) BuildApplyComment(repoRelDir string, workspace string, project string, autoMergeDisabled bool, mergeMethod string) string { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockCommentBuilder().") } - params := []pegomock.Param{repoRelDir, workspace, project, autoMergeDisabled} - result := pegomock.GetGenericMockFrom(mock).Invoke("BuildApplyComment", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem()}) - var ret0 string - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) + _params := []pegomock.Param{repoRelDir, workspace, project, autoMergeDisabled, mergeMethod} + _result := pegomock.GetGenericMockFrom(mock).Invoke("BuildApplyComment", _params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem()}) + var _ret0 string + if len(_result) != 0 { + if _result[0] != nil { + _ret0 = _result[0].(string) } } - return ret0 + return _ret0 } func (mock *MockCommentBuilder) BuildApprovePoliciesComment(repoRelDir string, workspace string, project string) string { if mock == nil { panic("mock must not be nil. Use myMock := NewMockCommentBuilder().") } - params := []pegomock.Param{repoRelDir, workspace, project} - result := pegomock.GetGenericMockFrom(mock).Invoke("BuildApprovePoliciesComment", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem()}) - var ret0 string - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) + _params := []pegomock.Param{repoRelDir, workspace, project} + _result := pegomock.GetGenericMockFrom(mock).Invoke("BuildApprovePoliciesComment", _params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem()}) + var _ret0 string + if len(_result) != 0 { + if _result[0] != nil { + _ret0 = _result[0].(string) } } - return ret0 + return _ret0 } func (mock *MockCommentBuilder) BuildPlanComment(repoRelDir string, workspace string, project string, commentArgs []string) string { if mock == nil { panic("mock must not be nil. Use myMock := NewMockCommentBuilder().") } - params := []pegomock.Param{repoRelDir, workspace, project, commentArgs} - result := pegomock.GetGenericMockFrom(mock).Invoke("BuildPlanComment", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem()}) - var ret0 string - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) + _params := []pegomock.Param{repoRelDir, workspace, project, commentArgs} + _result := pegomock.GetGenericMockFrom(mock).Invoke("BuildPlanComment", _params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem()}) + var _ret0 string + if len(_result) != 0 { + if _result[0] != nil { + _ret0 = _result[0].(string) } } - return ret0 + return _ret0 } func (mock *MockCommentBuilder) VerifyWasCalledOnce() *VerifierMockCommentBuilder { @@ -106,9 +106,9 @@ type VerifierMockCommentBuilder struct { timeout time.Duration } -func (verifier *VerifierMockCommentBuilder) BuildApplyComment(repoRelDir string, workspace string, project string, autoMergeDisabled bool) *MockCommentBuilder_BuildApplyComment_OngoingVerification { - params := []pegomock.Param{repoRelDir, workspace, project, autoMergeDisabled} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildApplyComment", params, verifier.timeout) +func (verifier *VerifierMockCommentBuilder) BuildApplyComment(repoRelDir string, workspace string, project string, autoMergeDisabled bool, mergeMethod string) *MockCommentBuilder_BuildApplyComment_OngoingVerification { + _params := []pegomock.Param{repoRelDir, workspace, project, autoMergeDisabled, mergeMethod} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildApplyComment", _params, 
verifier.timeout)
 	return &MockCommentBuilder_BuildApplyComment_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
 }
 
@@ -117,37 +117,51 @@ type MockCommentBuilder_BuildApplyComment_OngoingVerification struct {
 	methodInvocations []pegomock.MethodInvocation
 }
 
-func (c *MockCommentBuilder_BuildApplyComment_OngoingVerification) GetCapturedArguments() (string, string, string, bool) {
-	repoRelDir, workspace, project, autoMergeDisabled := c.GetAllCapturedArguments()
-	return repoRelDir[len(repoRelDir)-1], workspace[len(workspace)-1], project[len(project)-1], autoMergeDisabled[len(autoMergeDisabled)-1]
+func (c *MockCommentBuilder_BuildApplyComment_OngoingVerification) GetCapturedArguments() (string, string, string, bool, string) {
+	repoRelDir, workspace, project, autoMergeDisabled, mergeMethod := c.GetAllCapturedArguments()
+	return repoRelDir[len(repoRelDir)-1], workspace[len(workspace)-1], project[len(project)-1], autoMergeDisabled[len(autoMergeDisabled)-1], mergeMethod[len(mergeMethod)-1]
 }
 
-func (c *MockCommentBuilder_BuildApplyComment_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 []string, _param3 []bool) {
-	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
-	if len(params) > 0 {
-		_param0 = make([]string, len(c.methodInvocations))
-		for u, param := range params[0] {
-			_param0[u] = param.(string)
+func (c *MockCommentBuilder_BuildApplyComment_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 []string, _param3 []bool, _param4 []string) {
+	_params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+	if len(_params) > 0 {
+		if len(_params) > 0 {
+			_param0 = make([]string, len(c.methodInvocations))
+			for u, param := range _params[0] {
+				_param0[u] = param.(string)
+			}
 		}
-		_param1 = make([]string, len(c.methodInvocations))
-		for u, param := range params[1] {
-			_param1[u] = param.(string)
+		if len(_params) > 1 {
+			_param1 = make([]string, len(c.methodInvocations))
+			for u, param := range _params[1] {
+				_param1[u] = param.(string)
+			}
 		}
-		_param2 = make([]string, len(c.methodInvocations))
-		for u, param := range params[2] {
-			_param2[u] = param.(string)
+		if len(_params) > 2 {
+			_param2 = make([]string, len(c.methodInvocations))
+			for u, param := range _params[2] {
+				_param2[u] = param.(string)
+			}
 		}
-		_param3 = make([]bool, len(c.methodInvocations))
-		for u, param := range params[3] {
-			_param3[u] = param.(bool)
+		if len(_params) > 3 {
+			_param3 = make([]bool, len(c.methodInvocations))
+			for u, param := range _params[3] {
+				_param3[u] = param.(bool)
+			}
+		}
+		if len(_params) > 4 {
+			_param4 = make([]string, len(c.methodInvocations))
+			for u, param := range _params[4] {
+				_param4[u] = param.(string)
+			}
 		}
 	}
 	return
 }
 
 func (verifier *VerifierMockCommentBuilder) BuildApprovePoliciesComment(repoRelDir string, workspace string, project string) *MockCommentBuilder_BuildApprovePoliciesComment_OngoingVerification {
-	params := []pegomock.Param{repoRelDir, workspace, project}
-	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildApprovePoliciesComment", params, verifier.timeout)
+	_params := []pegomock.Param{repoRelDir, workspace, project}
+	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildApprovePoliciesComment", _params, verifier.timeout)
 	return &MockCommentBuilder_BuildApprovePoliciesComment_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
 }
 
@@ -162,27 +176,33 @@ func (c *MockCommentBuilder_BuildApprovePoliciesComment_OngoingVerification) Get
 }
 
 func (c *MockCommentBuilder_BuildApprovePoliciesComment_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 []string) {
-	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
-	if len(params) > 0 {
-		_param0 = make([]string, len(c.methodInvocations))
-		for u, param := range params[0] {
-			_param0[u] = param.(string)
+	_params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+	if len(_params) > 0 {
+		if len(_params) > 0 {
+			_param0 = make([]string, len(c.methodInvocations))
+			for u, param := range _params[0] {
+				_param0[u] = param.(string)
+			}
 		}
-		_param1 = make([]string, len(c.methodInvocations))
-		for u, param := range params[1] {
-			_param1[u] = param.(string)
+		if len(_params) > 1 {
+			_param1 = make([]string, len(c.methodInvocations))
+			for u, param := range _params[1] {
+				_param1[u] = param.(string)
+			}
 		}
-		_param2 = make([]string, len(c.methodInvocations))
-		for u, param := range params[2] {
-			_param2[u] = param.(string)
+		if len(_params) > 2 {
+			_param2 = make([]string, len(c.methodInvocations))
+			for u, param := range _params[2] {
+				_param2[u] = param.(string)
+			}
 		}
 	}
 	return
 }
 
 func (verifier *VerifierMockCommentBuilder) BuildPlanComment(repoRelDir string, workspace string, project string, commentArgs []string) *MockCommentBuilder_BuildPlanComment_OngoingVerification {
-	params := []pegomock.Param{repoRelDir, workspace, project, commentArgs}
-	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildPlanComment", params, verifier.timeout)
+	_params := []pegomock.Param{repoRelDir, workspace, project, commentArgs}
+	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildPlanComment", _params, verifier.timeout)
 	return &MockCommentBuilder_BuildPlanComment_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
 }
 
@@ -197,23 +217,31 @@ func (c *MockCommentBuilder_BuildPlanComment_OngoingVerification) GetCapturedArg
 }
 
 func (c *MockCommentBuilder_BuildPlanComment_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 []string, _param3 [][]string) {
-	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
-	if len(params) > 0 {
-		_param0 = make([]string, len(c.methodInvocations))
-		for u, param := range params[0] {
-			_param0[u] = param.(string)
+	_params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+	if len(_params) > 0 {
+		if len(_params) > 0 {
+			_param0 = make([]string, len(c.methodInvocations))
+			for u, param := range _params[0] {
+				_param0[u] = param.(string)
+			}
 		}
-		_param1 = make([]string, len(c.methodInvocations))
-		for u, param := range params[1] {
-			_param1[u] = param.(string)
+		if len(_params) > 1 {
+			_param1 = make([]string, len(c.methodInvocations))
+			for u, param := range _params[1] {
+				_param1[u] = param.(string)
+			}
 		}
-		_param2 = make([]string, len(c.methodInvocations))
-		for u, param := range params[2] {
-			_param2[u] = param.(string)
+		if len(_params) > 2 {
+			_param2 = make([]string, len(c.methodInvocations))
+			for u, param := range _params[2] {
+				_param2[u] = param.(string)
+			}
 		}
-		_param3 = make([][]string, len(c.methodInvocations))
-		for u, param := range params[3] {
-			_param3[u] = param.([]string)
+		if len(_params) > 3 {
+			_param3 = make([][]string, len(c.methodInvocations))
+			for u, param := range _params[3] {
+				_param3[u] = param.([]string)
+			}
 		}
 	}
 	return
diff --git a/server/events/mocks/mock_commit_status_updater.go b/server/events/mocks/mock_commit_status_updater.go
index 9525b9846f..b0e5fcea97 100644
--- a/server/events/mocks/mock_commit_status_updater.go
+++ b/server/events/mocks/mock_commit_status_updater.go
@@ -7,6 +7,7 @@ import (
 	pegomock "github.com/petergtz/pegomock/v4"
 	command "github.com/runatlantis/atlantis/server/events/command"
 	models "github.com/runatlantis/atlantis/server/events/models"
+	logging "github.com/runatlantis/atlantis/server/logging"
 	"reflect"
 	"time"
 )
@@ -26,11 +27,11 @@ func NewMockCommitStatusUpdater(options ...pegomock.Option) *MockCommitStatusUpd
 func (mock *MockCommitStatusUpdater) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh }
 func (mock *MockCommitStatusUpdater) FailHandler() pegomock.FailHandler { return mock.fail }
 
-func (mock *MockCommitStatusUpdater) UpdateCombined(repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName command.Name) error {
+func (mock *MockCommitStatusUpdater) UpdateCombined(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName command.Name) error {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockCommitStatusUpdater().")
 	}
-	params := []pegomock.Param{repo, pull, status, cmdName}
+	params := []pegomock.Param{logger, repo, pull, status, cmdName}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("UpdateCombined", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 error
 	if len(result) != 0 {
@@ -41,11 +42,11 @@ func (mock *MockCommitStatusUpdater) UpdateCombined(repo models.Repo, pull model
 	return ret0
 }
 
-func (mock *MockCommitStatusUpdater) UpdateCombinedCount(repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName command.Name, numSuccess int, numTotal int) error {
+func (mock *MockCommitStatusUpdater) UpdateCombinedCount(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName command.Name, numSuccess int, numTotal int) error {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockCommitStatusUpdater().")
 	}
-	params := []pegomock.Param{repo, pull, status, cmdName, numSuccess, numTotal}
+	params := []pegomock.Param{logger, repo, pull, status, cmdName, numSuccess, numTotal}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("UpdateCombinedCount", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 error
 	if len(result) != 0 {
@@ -56,11 +57,11 @@ func (mock *MockCommitStatusUpdater) UpdateCombinedCount(repo models.Repo, pull
 	return ret0
 }
 
-func (mock *MockCommitStatusUpdater) UpdatePostWorkflowHook(pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, url string) error {
+func (mock *MockCommitStatusUpdater) UpdatePostWorkflowHook(logger logging.SimpleLogging, pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, url string) error {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockCommitStatusUpdater().")
 	}
-	params := []pegomock.Param{pull, status, hookDescription, runtimeDescription, url}
+	params := []pegomock.Param{logger, pull, status, hookDescription, runtimeDescription, url}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("UpdatePostWorkflowHook", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 error
 	if len(result) != 0 {
@@ -71,11 +72,11 @@ func (mock *MockCommitStatusUpdater) UpdatePostWorkflowHook(pull models.PullRequ
 	return ret0
 }
 
-func (mock *MockCommitStatusUpdater) UpdatePreWorkflowHook(pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, url string) error {
+func (mock *MockCommitStatusUpdater) UpdatePreWorkflowHook(logger logging.SimpleLogging, pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, url string) error {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockCommitStatusUpdater().")
 	}
-	params := []pegomock.Param{pull, status, hookDescription, runtimeDescription, url}
+	params := []pegomock.Param{logger, pull, status, hookDescription, runtimeDescription, url}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("UpdatePreWorkflowHook", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 error
 	if len(result) != 0 {
@@ -123,8 +124,8 @@ type VerifierMockCommitStatusUpdater struct {
 	timeout time.Duration
 }
 
-func (verifier *VerifierMockCommitStatusUpdater) UpdateCombined(repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName command.Name) *MockCommitStatusUpdater_UpdateCombined_OngoingVerification {
-	params := []pegomock.Param{repo, pull, status, cmdName}
+func (verifier *VerifierMockCommitStatusUpdater) UpdateCombined(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName command.Name) *MockCommitStatusUpdater_UpdateCombined_OngoingVerification {
+	params := []pegomock.Param{logger, repo, pull, status, cmdName}
 	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UpdateCombined", params, verifier.timeout)
 	return &MockCommitStatusUpdater_UpdateCombined_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
 }
@@ -134,36 +135,40 @@ type MockCommitStatusUpdater_UpdateCombined_OngoingVerification struct {
 	methodInvocations []pegomock.MethodInvocation
 }
 
-func (c *MockCommitStatusUpdater_UpdateCombined_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, models.CommitStatus, command.Name) {
-	repo, pull, status, cmdName := c.GetAllCapturedArguments()
-	return repo[len(repo)-1], pull[len(pull)-1], status[len(status)-1], cmdName[len(cmdName)-1]
+func (c *MockCommitStatusUpdater_UpdateCombined_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, models.CommitStatus, command.Name) {
+	logger, repo, pull, status, cmdName := c.GetAllCapturedArguments()
+	return logger[len(logger)-1], repo[len(repo)-1], pull[len(pull)-1], status[len(status)-1], cmdName[len(cmdName)-1]
 }
 
-func (c *MockCommitStatusUpdater_UpdateCombined_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []models.CommitStatus, _param3 []command.Name) {
+func (c *MockCommitStatusUpdater_UpdateCombined_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []models.CommitStatus, _param4 []command.Name) {
 	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
 	if len(params) > 0 {
-		_param0 = make([]models.Repo, len(c.methodInvocations))
+		_param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
 		for u, param := range params[0] {
-			_param0[u] = param.(models.Repo)
+			_param0[u] = param.(logging.SimpleLogging)
 		}
-		_param1 = make([]models.PullRequest, len(c.methodInvocations))
+		_param1 = make([]models.Repo, len(c.methodInvocations))
 		for u, param := range params[1] {
-			_param1[u] = param.(models.PullRequest)
+			_param1[u] = param.(models.Repo)
 		}
-		_param2 = make([]models.CommitStatus, len(c.methodInvocations))
+		_param2 = make([]models.PullRequest, len(c.methodInvocations))
 		for u, param := range params[2] {
-			_param2[u] = param.(models.CommitStatus)
+			_param2[u] = param.(models.PullRequest)
 		}
-		_param3 = make([]command.Name, len(c.methodInvocations))
+		_param3 = make([]models.CommitStatus, len(c.methodInvocations))
 		for u, param := range params[3] {
-			_param3[u] = param.(command.Name)
+			_param3[u] = param.(models.CommitStatus)
+		}
+		_param4 = make([]command.Name, len(c.methodInvocations))
+		for u, param := range params[4] {
+			_param4[u] = param.(command.Name)
 		}
 	}
 	return
 }
 
-func (verifier *VerifierMockCommitStatusUpdater) UpdateCombinedCount(repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName command.Name, numSuccess int, numTotal int) *MockCommitStatusUpdater_UpdateCombinedCount_OngoingVerification {
-	params := []pegomock.Param{repo, pull, status, cmdName, numSuccess, numTotal}
+func (verifier *VerifierMockCommitStatusUpdater) UpdateCombinedCount(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, status models.CommitStatus, cmdName command.Name, numSuccess int, numTotal int) *MockCommitStatusUpdater_UpdateCombinedCount_OngoingVerification {
+	params := []pegomock.Param{logger, repo, pull, status, cmdName, numSuccess, numTotal}
 	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UpdateCombinedCount", params, verifier.timeout)
 	return &MockCommitStatusUpdater_UpdateCombinedCount_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
 }
@@ -173,44 +178,48 @@ type MockCommitStatusUpdater_UpdateCombinedCount_OngoingVerification struct {
 	methodInvocations []pegomock.MethodInvocation
 }
 
-func (c *MockCommitStatusUpdater_UpdateCombinedCount_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, models.CommitStatus, command.Name, int, int) {
-	repo, pull, status, cmdName, numSuccess, numTotal := c.GetAllCapturedArguments()
-	return repo[len(repo)-1], pull[len(pull)-1], status[len(status)-1], cmdName[len(cmdName)-1], numSuccess[len(numSuccess)-1], numTotal[len(numTotal)-1]
+func (c *MockCommitStatusUpdater_UpdateCombinedCount_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, models.CommitStatus, command.Name, int, int) {
+	logger, repo, pull, status, cmdName, numSuccess, numTotal := c.GetAllCapturedArguments()
+	return logger[len(logger)-1], repo[len(repo)-1], pull[len(pull)-1], status[len(status)-1], cmdName[len(cmdName)-1], numSuccess[len(numSuccess)-1], numTotal[len(numTotal)-1]
 }
 
-func (c *MockCommitStatusUpdater_UpdateCombinedCount_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []models.CommitStatus, _param3 []command.Name, _param4 []int, _param5 []int) {
+func (c *MockCommitStatusUpdater_UpdateCombinedCount_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []models.CommitStatus, _param4 []command.Name, _param5 []int, _param6 []int) {
 	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
 	if len(params) > 0 {
-		_param0 = make([]models.Repo, len(c.methodInvocations))
+		_param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
 		for u, param := range params[0] {
-			_param0[u] = param.(models.Repo)
+			_param0[u] = param.(logging.SimpleLogging)
 		}
-		_param1 = make([]models.PullRequest, len(c.methodInvocations))
+		_param1 = make([]models.Repo, len(c.methodInvocations))
 		for u, param := range params[1] {
-			_param1[u] = param.(models.PullRequest)
+			_param1[u] = param.(models.Repo)
 		}
-		_param2 = make([]models.CommitStatus, len(c.methodInvocations))
+		_param2 = make([]models.PullRequest, len(c.methodInvocations))
 		for u, param := range params[2] {
-			_param2[u] = param.(models.CommitStatus)
+			_param2[u] = param.(models.PullRequest)
 		}
-		_param3 = make([]command.Name, len(c.methodInvocations))
+		_param3 = make([]models.CommitStatus, len(c.methodInvocations))
 		for u, param := range params[3] {
-			_param3[u] = param.(command.Name)
+			_param3[u] = param.(models.CommitStatus)
 		}
-		_param4 = make([]int, len(c.methodInvocations))
+		_param4 = make([]command.Name, len(c.methodInvocations))
 		for u, param := range params[4] {
-			_param4[u] = param.(int)
+			_param4[u] = param.(command.Name)
 		}
 		_param5 = make([]int, len(c.methodInvocations))
 		for u, param := range params[5] {
 			_param5[u] = param.(int)
 		}
+		_param6 = make([]int, len(c.methodInvocations))
+		for u, param := range params[6] {
+			_param6[u] = param.(int)
+		}
 	}
 	return
 }
 
-func (verifier *VerifierMockCommitStatusUpdater) UpdatePostWorkflowHook(pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, url string) *MockCommitStatusUpdater_UpdatePostWorkflowHook_OngoingVerification {
-	params := []pegomock.Param{pull, status, hookDescription, runtimeDescription, url}
+func (verifier *VerifierMockCommitStatusUpdater) UpdatePostWorkflowHook(logger logging.SimpleLogging, pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, url string) *MockCommitStatusUpdater_UpdatePostWorkflowHook_OngoingVerification {
+	params := []pegomock.Param{logger, pull, status, hookDescription, runtimeDescription, url}
 	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UpdatePostWorkflowHook", params, verifier.timeout)
 	return &MockCommitStatusUpdater_UpdatePostWorkflowHook_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
 }
@@ -220,25 +229,25 @@ type MockCommitStatusUpdater_UpdatePostWorkflowHook_OngoingVerification struct {
 	methodInvocations []pegomock.MethodInvocation
 }
 
-func (c *MockCommitStatusUpdater_UpdatePostWorkflowHook_OngoingVerification) GetCapturedArguments() (models.PullRequest, models.CommitStatus, string, string, string) {
-	pull, status, hookDescription, runtimeDescription, url := c.GetAllCapturedArguments()
-	return pull[len(pull)-1], status[len(status)-1], hookDescription[len(hookDescription)-1], runtimeDescription[len(runtimeDescription)-1], url[len(url)-1]
+func (c *MockCommitStatusUpdater_UpdatePostWorkflowHook_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.PullRequest, models.CommitStatus, string, string, string) {
+	logger, pull, status, hookDescription, runtimeDescription, url := c.GetAllCapturedArguments()
+	return logger[len(logger)-1], pull[len(pull)-1], status[len(status)-1], hookDescription[len(hookDescription)-1], runtimeDescription[len(runtimeDescription)-1], url[len(url)-1]
 }
 
-func (c *MockCommitStatusUpdater_UpdatePostWorkflowHook_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest, _param1 []models.CommitStatus, _param2 []string, _param3 []string, _param4 []string) {
+func (c *MockCommitStatusUpdater_UpdatePostWorkflowHook_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.PullRequest, _param2 []models.CommitStatus, _param3 []string, _param4 []string, _param5 []string) {
 	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
 	if len(params) > 0 {
-		_param0 = make([]models.PullRequest, len(c.methodInvocations))
+		_param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
 		for u, param := range params[0] {
-			_param0[u] = param.(models.PullRequest)
+			_param0[u] = param.(logging.SimpleLogging)
 		}
-		_param1 = make([]models.CommitStatus, len(c.methodInvocations))
+		_param1 = make([]models.PullRequest, len(c.methodInvocations))
 		for u, param := range params[1] {
-			_param1[u] = param.(models.CommitStatus)
+			_param1[u] = param.(models.PullRequest)
 		}
-		_param2 = make([]string, len(c.methodInvocations))
+		_param2 = make([]models.CommitStatus, len(c.methodInvocations))
 		for u, param := range params[2] {
-			_param2[u] = param.(string)
+			_param2[u] = param.(models.CommitStatus)
 		}
 		_param3 = make([]string, len(c.methodInvocations))
 		for u, param := range params[3] {
@@ -248,12 +257,16 @@ func (c *MockCommitStatusUpdater_UpdatePostWorkflowHook_OngoingVerification) Get
 		for u, param := range params[4] {
 			_param4[u] = param.(string)
 		}
+		_param5 = make([]string, len(c.methodInvocations))
+		for u, param := range params[5] {
+			_param5[u] = param.(string)
+		}
 	}
 	return
 }
 
-func (verifier *VerifierMockCommitStatusUpdater) UpdatePreWorkflowHook(pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, url string) *MockCommitStatusUpdater_UpdatePreWorkflowHook_OngoingVerification {
-	params := []pegomock.Param{pull, status, hookDescription, runtimeDescription, url}
+func (verifier *VerifierMockCommitStatusUpdater) UpdatePreWorkflowHook(logger logging.SimpleLogging, pull models.PullRequest, status models.CommitStatus, hookDescription string, runtimeDescription string, url string) *MockCommitStatusUpdater_UpdatePreWorkflowHook_OngoingVerification {
+	params := []pegomock.Param{logger, pull, status, hookDescription, runtimeDescription, url}
 	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UpdatePreWorkflowHook", params, verifier.timeout)
 	return &MockCommitStatusUpdater_UpdatePreWorkflowHook_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
 }
@@ -263,25 +276,25 @@ type MockCommitStatusUpdater_UpdatePreWorkflowHook_OngoingVerification struct {
 	methodInvocations []pegomock.MethodInvocation
 }
 
-func (c *MockCommitStatusUpdater_UpdatePreWorkflowHook_OngoingVerification) GetCapturedArguments() (models.PullRequest, models.CommitStatus, string, string, string) {
-	pull, status, hookDescription, runtimeDescription, url := c.GetAllCapturedArguments()
-	return pull[len(pull)-1], status[len(status)-1], hookDescription[len(hookDescription)-1], runtimeDescription[len(runtimeDescription)-1], url[len(url)-1]
+func (c *MockCommitStatusUpdater_UpdatePreWorkflowHook_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.PullRequest, models.CommitStatus, string, string, string) {
+	logger, pull, status, hookDescription, runtimeDescription, url := c.GetAllCapturedArguments()
+	return logger[len(logger)-1], pull[len(pull)-1], status[len(status)-1], hookDescription[len(hookDescription)-1], runtimeDescription[len(runtimeDescription)-1], url[len(url)-1]
 }
 
-func (c *MockCommitStatusUpdater_UpdatePreWorkflowHook_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest, _param1 []models.CommitStatus, _param2 []string, _param3 []string, _param4 []string) {
+func (c *MockCommitStatusUpdater_UpdatePreWorkflowHook_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.PullRequest, _param2 []models.CommitStatus, _param3 []string, _param4 []string, _param5 []string) {
 	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
 	if len(params) > 0 {
-		_param0 = make([]models.PullRequest, len(c.methodInvocations))
+		_param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
 		for u, param := range params[0] {
-			_param0[u] = param.(models.PullRequest)
+			_param0[u] = param.(logging.SimpleLogging)
 		}
-		_param1 = make([]models.CommitStatus, len(c.methodInvocations))
+		_param1 = make([]models.PullRequest, len(c.methodInvocations))
 		for u, param := range params[1] {
-			_param1[u] = param.(models.CommitStatus)
+			_param1[u] = param.(models.PullRequest)
 		}
-		_param2 = make([]string, len(c.methodInvocations))
+		_param2 = make([]models.CommitStatus, len(c.methodInvocations))
 		for u, param := range params[2] {
-			_param2[u] = param.(string)
+			_param2[u] = param.(models.CommitStatus)
 		}
 		_param3 = make([]string, len(c.methodInvocations))
 		for u, param := range params[3] {
@@ -291,6 +304,10 @@ func (c *MockCommitStatusUpdater_UpdatePreWorkflowHook_OngoingVerification) GetA
 		for u, param := range params[4] {
 			_param4[u] = param.(string)
 		}
+		_param5 = make([]string, len(c.methodInvocations))
+		for u, param := range params[5] {
+			_param5[u] = param.(string)
+		}
 	}
 	return
 }
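The regenerated MockCommitStatusUpdater changes every method's arity, so hand-written tests that stub or verify it must thread the logger through as the leading argument. The following is a minimal sketch of the new call shape, not part of the patch; it assumes the repo's logging.NewNoopLogger test helper and standard pegomock v4 verification:

```go
package events_test

import (
	"testing"

	. "github.com/petergtz/pegomock/v4"
	"github.com/runatlantis/atlantis/server/events/command"
	"github.com/runatlantis/atlantis/server/events/mocks"
	"github.com/runatlantis/atlantis/server/events/models"
	"github.com/runatlantis/atlantis/server/logging"
)

func TestUpdateCombined_PassesLoggerFirst(t *testing.T) {
	RegisterMockTestingT(t)
	m := mocks.NewMockCommitStatusUpdater()
	logger := logging.NewNoopLogger(t) // assumed test helper from server/logging

	// The logger is now the leading argument on every call...
	m.UpdateCombined(logger, models.Repo{}, models.PullRequest{}, models.PendingCommitStatus, command.Plan)

	// ...and on every verification, matching the regenerated signature above.
	m.VerifyWasCalledOnce().UpdateCombined(logger, models.Repo{}, models.PullRequest{}, models.PendingCommitStatus, command.Plan)
}
```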
diff --git a/server/events/mocks/mock_custom_step_runner.go b/server/events/mocks/mock_custom_step_runner.go
index 8805706322..7662d22ba0 100644
--- a/server/events/mocks/mock_custom_step_runner.go
+++ b/server/events/mocks/mock_custom_step_runner.go
@@ -26,7 +26,7 @@ func NewMockCustomStepRunner(options ...pegomock.Option) *MockCustomStepRunner {
 func (mock *MockCustomStepRunner) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh }
 func (mock *MockCustomStepRunner) FailHandler() pegomock.FailHandler { return mock.fail }
 
-func (mock *MockCustomStepRunner) Run(ctx command.ProjectContext, cmd string, path string, envs map[string]string, streamOutput bool, postProcessOutput valid.PostProcessRunOutputOption) (string, error) {
+func (mock *MockCustomStepRunner) Run(ctx command.ProjectContext, shell *valid.CommandShell, cmd string, path string, envs map[string]string, streamOutput bool, postProcessOutput valid.PostProcessRunOutputOption) (string, error) {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockCustomStepRunner().")
 	}
@@ -82,7 +82,7 @@ type VerifierMockCustomStepRunner struct {
 	timeout time.Duration
 }
 
-func (verifier *VerifierMockCustomStepRunner) Run(ctx command.ProjectContext, cmd string, path string, envs map[string]string, streamOutput bool, postProcessOutput valid.PostProcessRunOutputOption) *MockCustomStepRunner_Run_OngoingVerification {
+func (verifier *VerifierMockCustomStepRunner) Run(ctx command.ProjectContext, shell *valid.CommandShell, cmd string, path string, envs map[string]string, streamOutput bool, postProcessOutput valid.PostProcessRunOutputOption) *MockCustomStepRunner_Run_OngoingVerification {
 	params := []pegomock.Param{ctx, cmd, path, envs, streamOutput, postProcessOutput}
 	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Run", params, verifier.timeout)
 	return &MockCustomStepRunner_Run_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
diff --git a/server/events/mocks/mock_delete_lock_command.go b/server/events/mocks/mock_delete_lock_command.go
index ce1afd3b72..a8511f28c8 100644
--- a/server/events/mocks/mock_delete_lock_command.go
+++ b/server/events/mocks/mock_delete_lock_command.go
@@ -6,6 +6,7 @@ package mocks
 import (
 	pegomock "github.com/petergtz/pegomock/v4"
 	models "github.com/runatlantis/atlantis/server/events/models"
+	logging "github.com/runatlantis/atlantis/server/logging"
 	"reflect"
 	"time"
 )
@@ -25,11 +26,11 @@ func NewMockDeleteLockCommand(options ...pegomock.Option) *MockDeleteLockCommand
 func (mock *MockDeleteLockCommand) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh }
 func (mock *MockDeleteLockCommand) FailHandler() pegomock.FailHandler { return mock.fail }
 
-func (mock *MockDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, error) {
+func (mock *MockDeleteLockCommand) DeleteLock(logger logging.SimpleLogging, id string) (*models.ProjectLock, error) {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockDeleteLockCommand().")
 	}
-	params := []pegomock.Param{id}
+	params := []pegomock.Param{logger, id}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteLock", params, []reflect.Type{reflect.TypeOf((**models.ProjectLock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 *models.ProjectLock
 	var ret1 error
@@ -44,11 +45,11 @@ func (mock *MockDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, e
 	return ret0, ret1
 }
 
-func (mock *MockDeleteLockCommand) DeleteLocksByPull(repoFullName string, pullNum int) (int, error) {
+func (mock *MockDeleteLockCommand) DeleteLocksByPull(logger logging.SimpleLogging, repoFullName string, pullNum int) (int, error) {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockDeleteLockCommand().")
 	}
-	params := []pegomock.Param{repoFullName, pullNum}
+	params := []pegomock.Param{logger, repoFullName, pullNum}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteLocksByPull", params, []reflect.Type{reflect.TypeOf((*int)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 int
 	var ret1 error
@@ -100,8 +101,8 @@ type VerifierMockDeleteLockCommand struct {
 	timeout time.Duration
 }
 
-func (verifier *VerifierMockDeleteLockCommand) DeleteLock(id string) *MockDeleteLockCommand_DeleteLock_OngoingVerification {
-	params := []pegomock.Param{id}
+func (verifier *VerifierMockDeleteLockCommand) DeleteLock(logger logging.SimpleLogging, id string) *MockDeleteLockCommand_DeleteLock_OngoingVerification {
+	params := []pegomock.Param{logger, id}
 	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteLock", params, verifier.timeout)
 	return &MockDeleteLockCommand_DeleteLock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
 }
@@ -111,24 +112,28 @@ type MockDeleteLockCommand_DeleteLock_OngoingVerification struct {
 	methodInvocations []pegomock.MethodInvocation
 }
 
-func (c *MockDeleteLockCommand_DeleteLock_OngoingVerification) GetCapturedArguments() string {
-	id := c.GetAllCapturedArguments()
-	return id[len(id)-1]
+func (c *MockDeleteLockCommand_DeleteLock_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string) {
+	logger, id := c.GetAllCapturedArguments()
+	return logger[len(logger)-1], id[len(id)-1]
 }
 
-func (c *MockDeleteLockCommand_DeleteLock_OngoingVerification) GetAllCapturedArguments() (_param0 []string) {
+func (c *MockDeleteLockCommand_DeleteLock_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string) {
 	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
 	if len(params) > 0 {
-		_param0 = make([]string, len(c.methodInvocations))
+		_param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
 		for u, param := range params[0] {
-			_param0[u] = param.(string)
+			_param0[u] = param.(logging.SimpleLogging)
+		}
+		_param1 = make([]string, len(c.methodInvocations))
+		for u, param := range params[1] {
+			_param1[u] = param.(string)
 		}
 	}
 	return
 }
 
-func (verifier *VerifierMockDeleteLockCommand) DeleteLocksByPull(repoFullName string, pullNum int) *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification {
-	params := []pegomock.Param{repoFullName, pullNum}
+func (verifier *VerifierMockDeleteLockCommand) DeleteLocksByPull(logger logging.SimpleLogging, repoFullName string, pullNum int) *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification {
+	params := []pegomock.Param{logger, repoFullName, pullNum}
 	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteLocksByPull", params, verifier.timeout)
 	return &MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
 }
@@ -138,21 +143,25 @@ type MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification struct {
 	methodInvocations []pegomock.MethodInvocation
 }
 
-func (c *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification) GetCapturedArguments() (string, int) {
-	repoFullName, pullNum := c.GetAllCapturedArguments()
-	return repoFullName[len(repoFullName)-1], pullNum[len(pullNum)-1]
+func (c *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string, int) {
+	logger, repoFullName, pullNum := c.GetAllCapturedArguments()
+	return logger[len(logger)-1], repoFullName[len(repoFullName)-1], pullNum[len(pullNum)-1]
 }
 
-func (c *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []int) {
+func (c *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string, _param2 []int) {
 	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
 	if len(params) > 0 {
-		_param0 = make([]string, len(c.methodInvocations))
+		_param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
 		for u, param := range params[0] {
-			_param0[u] = param.(string)
+			_param0[u] = param.(logging.SimpleLogging)
 		}
-		_param1 = make([]int, len(c.methodInvocations))
+		_param1 = make([]string, len(c.methodInvocations))
 		for u, param := range params[1] {
-			_param1[u] = param.(int)
+			_param1[u] = param.(string)
+		}
+		_param2 = make([]int, len(c.methodInvocations))
+		for u, param := range params[2] {
+			_param2[u] = param.(int)
 		}
 	}
 	return
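DeleteLock and DeleteLocksByPull gain the same leading logger parameter, which also shifts pegomock stubbing set-ups by one argument. A hedged sketch of the new stubbing shape (not part of the patch; again assuming logging.NewNoopLogger):

```go
package events_test

import (
	"testing"

	. "github.com/petergtz/pegomock/v4"
	"github.com/runatlantis/atlantis/server/events/mocks"
	"github.com/runatlantis/atlantis/server/events/models"
	"github.com/runatlantis/atlantis/server/logging"
)

func TestDeleteLock_StubWithLogger(t *testing.T) {
	RegisterMockTestingT(t)
	m := mocks.NewMockDeleteLockCommand()
	logger := logging.NewNoopLogger(t) // assumed test helper from server/logging

	// Stubs now match on the logger as well as the lock ID; the ID here is a
	// hypothetical example value.
	When(m.DeleteLock(logger, "owner/repo/path/workspace")).ThenReturn(&models.ProjectLock{}, nil)

	lock, err := m.DeleteLock(logger, "owner/repo/path/workspace")
	if err != nil || lock == nil {
		t.Fatalf("expected stubbed lock, got %v, %v", lock, err)
	}
}
```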
diff --git a/server/events/mocks/mock_env_step_runner.go b/server/events/mocks/mock_env_step_runner.go
index bfc7f97a57..0d99311987 100644
--- a/server/events/mocks/mock_env_step_runner.go
+++ b/server/events/mocks/mock_env_step_runner.go
@@ -4,10 +4,12 @@ package mocks
 
 import (
-	pegomock "github.com/petergtz/pegomock/v4"
-	command "github.com/runatlantis/atlantis/server/events/command"
 	"reflect"
 	"time"
+
+	pegomock "github.com/petergtz/pegomock/v4"
+	"github.com/runatlantis/atlantis/server/core/config/valid"
+	command "github.com/runatlantis/atlantis/server/events/command"
 )
 
 type MockEnvStepRunner struct {
@@ -25,7 +27,7 @@ func NewMockEnvStepRunner(options ...pegomock.Option) *MockEnvStepRunner {
 func (mock *MockEnvStepRunner) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh }
 func (mock *MockEnvStepRunner) FailHandler() pegomock.FailHandler { return mock.fail }
 
-func (mock *MockEnvStepRunner) Run(ctx command.ProjectContext, cmd string, value string, path string, envs map[string]string) (string, error) {
+func (mock *MockEnvStepRunner) Run(ctx command.ProjectContext, shell *valid.CommandShell, cmd string, value string, path string, envs map[string]string) (string, error) {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockEnvStepRunner().")
 	}
@@ -81,7 +83,7 @@ type VerifierMockEnvStepRunner struct {
 	timeout time.Duration
 }
 
-func (verifier *VerifierMockEnvStepRunner) Run(ctx command.ProjectContext, cmd string, value string, path string, envs map[string]string) *MockEnvStepRunner_Run_OngoingVerification {
+func (verifier *VerifierMockEnvStepRunner) Run(ctx command.ProjectContext, shell *valid.CommandShell, cmd string, value string, path string, envs map[string]string) *MockEnvStepRunner_Run_OngoingVerification {
 	params := []pegomock.Param{ctx, cmd, value, path, envs}
 	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Run", params, verifier.timeout)
 	return &MockEnvStepRunner_Run_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
diff --git a/server/events/mocks/mock_event_parsing.go b/server/events/mocks/mock_event_parsing.go
index de0a543c38..ca816e9a97 100644
--- a/server/events/mocks/mock_event_parsing.go
+++ b/server/events/mocks/mock_event_parsing.go
@@ -4,10 +4,13 @@ package mocks
 
 import (
-	github "github.com/google/go-github/v58/github"
+	gitea "code.gitea.io/sdk/gitea"
+	github "github.com/google/go-github/v65/github"
 	azuredevops "github.com/mcdafydd/go-azuredevops/azuredevops"
 	pegomock "github.com/petergtz/pegomock/v4"
 	models "github.com/runatlantis/atlantis/server/events/models"
+	gitea0 "github.com/runatlantis/atlantis/server/events/vcs/gitea"
+	logging "github.com/runatlantis/atlantis/server/logging"
 	go_gitlab "github.com/xanzy/go-gitlab"
 	"reflect"
 	"time"
@@ -290,11 +293,100 @@ func (mock *MockEventParsing) ParseBitbucketServerPullEvent(body []byte) (models
 	return ret0, ret1, ret2, ret3, ret4
 }
 
-func (mock *MockEventParsing) ParseGithubIssueCommentEvent(comment *github.IssueCommentEvent) (models.Repo, models.User, int, error) {
+func (mock *MockEventParsing) ParseGiteaIssueCommentEvent(event gitea0.GiteaIssueCommentPayload) (models.Repo, models.User, int, error) {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockEventParsing().")
 	}
-	params := []pegomock.Param{comment}
+	params := []pegomock.Param{event}
+	result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGiteaIssueCommentEvent", params, []reflect.Type{reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*int)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+	var ret0 models.Repo
+	var ret1 models.User
+	var ret2 int
+	var ret3 error
+	if len(result) != 0 {
+		if result[0] != nil {
+			ret0 = result[0].(models.Repo)
+		}
+		if result[1] != nil {
+			ret1 = result[1].(models.User)
+		}
+		if result[2] != nil {
+			ret2 = result[2].(int)
+		}
+		if result[3] != nil {
+			ret3 = result[3].(error)
+		}
+	}
+	return ret0, ret1, ret2, ret3
+}
+
+func (mock *MockEventParsing) ParseGiteaPull(pull *gitea.PullRequest) (models.PullRequest, models.Repo, models.Repo, error) {
+	if mock == nil {
+		panic("mock must not be nil. Use myMock := NewMockEventParsing().")
+	}
+	params := []pegomock.Param{pull}
+	result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGiteaPull", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+	var ret0 models.PullRequest
+	var ret1 models.Repo
+	var ret2 models.Repo
+	var ret3 error
+	if len(result) != 0 {
+		if result[0] != nil {
+			ret0 = result[0].(models.PullRequest)
+		}
+		if result[1] != nil {
+			ret1 = result[1].(models.Repo)
+		}
+		if result[2] != nil {
+			ret2 = result[2].(models.Repo)
+		}
+		if result[3] != nil {
+			ret3 = result[3].(error)
+		}
+	}
+	return ret0, ret1, ret2, ret3
+}
+
+func (mock *MockEventParsing) ParseGiteaPullRequestEvent(event gitea.PullRequest) (models.PullRequest, models.PullRequestEventType, models.Repo, models.Repo, models.User, error) {
+	if mock == nil {
+		panic("mock must not be nil. Use myMock := NewMockEventParsing().")
+	}
+	params := []pegomock.Param{event}
+	result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGiteaPullRequestEvent", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.PullRequestEventType)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+	var ret0 models.PullRequest
+	var ret1 models.PullRequestEventType
+	var ret2 models.Repo
+	var ret3 models.Repo
+	var ret4 models.User
+	var ret5 error
+	if len(result) != 0 {
+		if result[0] != nil {
+			ret0 = result[0].(models.PullRequest)
+		}
+		if result[1] != nil {
+			ret1 = result[1].(models.PullRequestEventType)
+		}
+		if result[2] != nil {
+			ret2 = result[2].(models.Repo)
+		}
+		if result[3] != nil {
+			ret3 = result[3].(models.Repo)
+		}
+		if result[4] != nil {
+			ret4 = result[4].(models.User)
+		}
+		if result[5] != nil {
+			ret5 = result[5].(error)
+		}
+	}
+	return ret0, ret1, ret2, ret3, ret4, ret5
+}
+
+func (mock *MockEventParsing) ParseGithubIssueCommentEvent(logger logging.SimpleLogging, comment *github.IssueCommentEvent) (models.Repo, models.User, int, error) {
+	if mock == nil {
+		panic("mock must not be nil. Use myMock := NewMockEventParsing().")
+	}
+	params := []pegomock.Param{logger, comment}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGithubIssueCommentEvent", params, []reflect.Type{reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*int)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 models.Repo
 	var ret1 models.User
@@ -317,11 +409,11 @@ func (mock *MockEventParsing) ParseGithubIssueCommentEvent(comment *github.Issue
 	return ret0, ret1, ret2, ret3
 }
 
-func (mock *MockEventParsing) ParseGithubPull(ghPull *github.PullRequest) (models.PullRequest, models.Repo, models.Repo, error) {
+func (mock *MockEventParsing) ParseGithubPull(logger logging.SimpleLogging, ghPull *github.PullRequest) (models.PullRequest, models.Repo, models.Repo, error) {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockEventParsing().")
 	}
-	params := []pegomock.Param{ghPull}
+	params := []pegomock.Param{logger, ghPull}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGithubPull", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 models.PullRequest
 	var ret1 models.Repo
@@ -344,11 +436,11 @@ func (mock *MockEventParsing) ParseGithubPull(ghPull *github.PullRequest) (model
 	return ret0, ret1, ret2, ret3
 }
 
-func (mock *MockEventParsing) ParseGithubPullEvent(pullEvent *github.PullRequestEvent) (models.PullRequest, models.PullRequestEventType, models.Repo, models.Repo, models.User, error) {
+func (mock *MockEventParsing) ParseGithubPullEvent(logger logging.SimpleLogging, pullEvent *github.PullRequestEvent) (models.PullRequest, models.PullRequestEventType, models.Repo, models.Repo, models.User, error) {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockEventParsing().")
 	}
-	params := []pegomock.Param{pullEvent}
+	params := []pegomock.Param{logger, pullEvent}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGithubPullEvent", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.PullRequestEventType)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 models.PullRequest
 	var ret1 models.PullRequestEventType
@@ -817,8 +909,89 @@ func (c *MockEventParsing_ParseBitbucketServerPullEvent_OngoingVerification) Get
 	return
 }
 
-func (verifier *VerifierMockEventParsing) ParseGithubIssueCommentEvent(comment *github.IssueCommentEvent) *MockEventParsing_ParseGithubIssueCommentEvent_OngoingVerification {
-	params := []pegomock.Param{comment}
+func (verifier *VerifierMockEventParsing) ParseGiteaIssueCommentEvent(event gitea0.GiteaIssueCommentPayload) *MockEventParsing_ParseGiteaIssueCommentEvent_OngoingVerification {
+	params := []pegomock.Param{event}
+	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGiteaIssueCommentEvent", params, verifier.timeout)
+	return &MockEventParsing_ParseGiteaIssueCommentEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type MockEventParsing_ParseGiteaIssueCommentEvent_OngoingVerification struct {
+	mock              *MockEventParsing
+	methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *MockEventParsing_ParseGiteaIssueCommentEvent_OngoingVerification) GetCapturedArguments() gitea0.GiteaIssueCommentPayload {
+	event := c.GetAllCapturedArguments()
+	return event[len(event)-1]
+}
+
+func (c *MockEventParsing_ParseGiteaIssueCommentEvent_OngoingVerification) GetAllCapturedArguments() (_param0 []gitea0.GiteaIssueCommentPayload) {
+	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+	if len(params) > 0 {
+		_param0 = make([]gitea0.GiteaIssueCommentPayload, len(c.methodInvocations))
+		for u, param := range params[0] {
+			_param0[u] = param.(gitea0.GiteaIssueCommentPayload)
+		}
+	}
+	return
+}
+
+func (verifier *VerifierMockEventParsing) ParseGiteaPull(pull *gitea.PullRequest) *MockEventParsing_ParseGiteaPull_OngoingVerification {
+	params := []pegomock.Param{pull}
+	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGiteaPull", params, verifier.timeout)
+	return &MockEventParsing_ParseGiteaPull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type MockEventParsing_ParseGiteaPull_OngoingVerification struct {
+	mock              *MockEventParsing
+	methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *MockEventParsing_ParseGiteaPull_OngoingVerification) GetCapturedArguments() *gitea.PullRequest {
+	pull := c.GetAllCapturedArguments()
+	return pull[len(pull)-1]
+}
+
+func (c *MockEventParsing_ParseGiteaPull_OngoingVerification) GetAllCapturedArguments() (_param0 []*gitea.PullRequest) {
+	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+	if len(params) > 0 {
+		_param0 = make([]*gitea.PullRequest, len(c.methodInvocations))
+		for u, param := range params[0] {
+			_param0[u] = param.(*gitea.PullRequest)
+		}
+	}
+	return
+}
+
+func (verifier *VerifierMockEventParsing) ParseGiteaPullRequestEvent(event gitea.PullRequest) *MockEventParsing_ParseGiteaPullRequestEvent_OngoingVerification {
+	params := []pegomock.Param{event}
+	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGiteaPullRequestEvent", params, verifier.timeout)
+	return &MockEventParsing_ParseGiteaPullRequestEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type MockEventParsing_ParseGiteaPullRequestEvent_OngoingVerification struct {
+	mock              *MockEventParsing
+	methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *MockEventParsing_ParseGiteaPullRequestEvent_OngoingVerification) GetCapturedArguments() gitea.PullRequest {
+	event := c.GetAllCapturedArguments()
+	return event[len(event)-1]
+}
+
+func (c *MockEventParsing_ParseGiteaPullRequestEvent_OngoingVerification) GetAllCapturedArguments() (_param0 []gitea.PullRequest) {
+	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+	if len(params) > 0 {
+		_param0 = make([]gitea.PullRequest, len(c.methodInvocations))
+		for u, param := range params[0] {
+			_param0[u] = param.(gitea.PullRequest)
+		}
+	}
+	return
+}
+
+func (verifier *VerifierMockEventParsing) ParseGithubIssueCommentEvent(logger logging.SimpleLogging, comment *github.IssueCommentEvent) *MockEventParsing_ParseGithubIssueCommentEvent_OngoingVerification {
+	params := []pegomock.Param{logger, comment}
 	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGithubIssueCommentEvent", params, verifier.timeout)
 	return &MockEventParsing_ParseGithubIssueCommentEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
 }
@@ -828,24 +1001,28 @@ type MockEventParsing_ParseGithubIssueCommentEvent_OngoingVerification struct {
 	methodInvocations []pegomock.MethodInvocation
 }
 
-func (c *MockEventParsing_ParseGithubIssueCommentEvent_OngoingVerification) GetCapturedArguments() *github.IssueCommentEvent {
-	comment := c.GetAllCapturedArguments()
-	return comment[len(comment)-1]
+func (c *MockEventParsing_ParseGithubIssueCommentEvent_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, *github.IssueCommentEvent) {
+	logger, comment := c.GetAllCapturedArguments()
+	return logger[len(logger)-1], comment[len(comment)-1]
 }
 
-func (c *MockEventParsing_ParseGithubIssueCommentEvent_OngoingVerification) GetAllCapturedArguments() (_param0 []*github.IssueCommentEvent) {
+func (c *MockEventParsing_ParseGithubIssueCommentEvent_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []*github.IssueCommentEvent) {
 	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
 	if len(params) > 0 {
-		_param0 = make([]*github.IssueCommentEvent, len(c.methodInvocations))
+		_param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
 		for u, param := range params[0] {
-			_param0[u] = param.(*github.IssueCommentEvent)
+			_param0[u] = param.(logging.SimpleLogging)
+		}
+		_param1 = make([]*github.IssueCommentEvent, len(c.methodInvocations))
+		for u, param := range params[1] {
+			_param1[u] = param.(*github.IssueCommentEvent)
 		}
 	}
 	return
 }
 
-func (verifier *VerifierMockEventParsing) ParseGithubPull(ghPull *github.PullRequest) *MockEventParsing_ParseGithubPull_OngoingVerification {
-	params := []pegomock.Param{ghPull}
+func (verifier *VerifierMockEventParsing) ParseGithubPull(logger logging.SimpleLogging, ghPull *github.PullRequest) *MockEventParsing_ParseGithubPull_OngoingVerification {
+	params := []pegomock.Param{logger, ghPull}
 	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGithubPull", params, verifier.timeout)
 	return &MockEventParsing_ParseGithubPull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
 }
@@ -855,24 +1032,28 @@ type MockEventParsing_ParseGithubPull_OngoingVerification struct {
 	methodInvocations []pegomock.MethodInvocation
 }
 
-func (c *MockEventParsing_ParseGithubPull_OngoingVerification) GetCapturedArguments() *github.PullRequest {
-	ghPull := c.GetAllCapturedArguments()
-	return ghPull[len(ghPull)-1]
+func (c *MockEventParsing_ParseGithubPull_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, *github.PullRequest) {
+	logger, ghPull := c.GetAllCapturedArguments()
+	return logger[len(logger)-1], ghPull[len(ghPull)-1]
 }
 
-func (c *MockEventParsing_ParseGithubPull_OngoingVerification) GetAllCapturedArguments() (_param0 []*github.PullRequest) {
+func (c *MockEventParsing_ParseGithubPull_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []*github.PullRequest) {
 	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
 	if len(params) > 0 {
-		_param0 = make([]*github.PullRequest, len(c.methodInvocations))
+		_param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
 		for u, param := range params[0] {
-			_param0[u] = param.(*github.PullRequest)
+			_param0[u] = param.(logging.SimpleLogging)
+		}
+		_param1 = make([]*github.PullRequest, len(c.methodInvocations))
+		for u, param := range params[1] {
+			_param1[u] = param.(*github.PullRequest)
 		}
 	}
 	return
 }
 
-func (verifier *VerifierMockEventParsing) ParseGithubPullEvent(pullEvent *github.PullRequestEvent) *MockEventParsing_ParseGithubPullEvent_OngoingVerification {
-	params := []pegomock.Param{pullEvent}
+func (verifier *VerifierMockEventParsing) ParseGithubPullEvent(logger logging.SimpleLogging, pullEvent *github.PullRequestEvent) *MockEventParsing_ParseGithubPullEvent_OngoingVerification {
+	params := []pegomock.Param{logger, pullEvent}
 	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGithubPullEvent", params, verifier.timeout)
 	return &MockEventParsing_ParseGithubPullEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
 }
@@ -882,17 +1063,21 @@ type MockEventParsing_ParseGithubPullEvent_OngoingVerification struct {
 	methodInvocations []pegomock.MethodInvocation
 }
 
-func (c *MockEventParsing_ParseGithubPullEvent_OngoingVerification) GetCapturedArguments() *github.PullRequestEvent {
-	pullEvent := c.GetAllCapturedArguments()
-	return pullEvent[len(pullEvent)-1]
+func (c *MockEventParsing_ParseGithubPullEvent_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, *github.PullRequestEvent) {
+	logger, pullEvent := c.GetAllCapturedArguments()
+	return logger[len(logger)-1], pullEvent[len(pullEvent)-1]
 }
 
-func (c *MockEventParsing_ParseGithubPullEvent_OngoingVerification) GetAllCapturedArguments() (_param0 []*github.PullRequestEvent) {
+func (c *MockEventParsing_ParseGithubPullEvent_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []*github.PullRequestEvent) {
 	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
 	if len(params) > 0 {
-		_param0 = make([]*github.PullRequestEvent, len(c.methodInvocations))
+		_param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
 		for u, param := range params[0] {
-			_param0[u] = param.(*github.PullRequestEvent)
+			_param0[u] = param.(logging.SimpleLogging)
+		}
+		_param1 = make([]*github.PullRequestEvent, len(c.methodInvocations))
+		for u, param := range params[1] {
+			_param1[u] = param.(*github.PullRequestEvent)
 		}
 	}
 	return
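The EventParsing mock shows the same pattern applied to parsing: the GitHub parse methods take a leading logger, and the captured-argument accessors return it first. A sketch of verifying the new shape (a hypothetical test, not part of the patch, assuming the same helpers as above):

```go
package events_test

import (
	"testing"

	github "github.com/google/go-github/v65/github"
	. "github.com/petergtz/pegomock/v4"
	"github.com/runatlantis/atlantis/server/events/mocks"
	"github.com/runatlantis/atlantis/server/logging"
)

func TestParseGithubIssueCommentEvent_LoggerIsCaptured(t *testing.T) {
	RegisterMockTestingT(t)
	m := mocks.NewMockEventParsing()
	logger := logging.NewNoopLogger(t) // assumed test helper from server/logging
	comment := &github.IssueCommentEvent{}

	m.ParseGithubIssueCommentEvent(logger, comment)

	// GetCapturedArguments now returns the logger ahead of the event payload.
	gotLogger, gotComment := m.VerifyWasCalledOnce().ParseGithubIssueCommentEvent(logger, comment).GetCapturedArguments()
	if gotLogger != logger || gotComment != comment {
		t.Fatal("captured arguments should round-trip in the new order")
	}
}
```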
diff --git a/server/events/mocks/mock_github_pull_getter.go b/server/events/mocks/mock_github_pull_getter.go
index cd38c859cd..7127f5cd9b 100644
--- a/server/events/mocks/mock_github_pull_getter.go
+++ b/server/events/mocks/mock_github_pull_getter.go
@@ -4,9 +4,10 @@ package mocks
 
 import (
-	github "github.com/google/go-github/v58/github"
+	github "github.com/google/go-github/v65/github"
 	pegomock "github.com/petergtz/pegomock/v4"
 	models "github.com/runatlantis/atlantis/server/events/models"
+	logging "github.com/runatlantis/atlantis/server/logging"
 	"reflect"
 	"time"
 )
@@ -26,11 +27,11 @@ func NewMockGithubPullGetter(options ...pegomock.Option) *MockGithubPullGetter {
 func (mock *MockGithubPullGetter) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh }
 func (mock *MockGithubPullGetter) FailHandler() pegomock.FailHandler { return mock.fail }
 
-func (mock *MockGithubPullGetter) GetPullRequest(repo models.Repo, pullNum int) (*github.PullRequest, error) {
+func (mock *MockGithubPullGetter) GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) (*github.PullRequest, error) {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockGithubPullGetter().")
 	}
-	params := []pegomock.Param{repo, pullNum}
+	params := []pegomock.Param{logger, repo, pullNum}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("GetPullRequest", params, []reflect.Type{reflect.TypeOf((**github.PullRequest)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 *github.PullRequest
 	var ret1 error
@@ -82,8 +83,8 @@ type VerifierMockGithubPullGetter struct {
 	timeout time.Duration
 }
 
-func (verifier *VerifierMockGithubPullGetter) GetPullRequest(repo models.Repo, pullNum int) *MockGithubPullGetter_GetPullRequest_OngoingVerification {
-	params := []pegomock.Param{repo, pullNum}
+func (verifier *VerifierMockGithubPullGetter) GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) *MockGithubPullGetter_GetPullRequest_OngoingVerification {
+	params := []pegomock.Param{logger, repo, pullNum}
 	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetPullRequest", params, verifier.timeout)
 	return &MockGithubPullGetter_GetPullRequest_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
 }
@@ -93,21 +94,25 @@ type MockGithubPullGetter_GetPullRequest_OngoingVerification struct {
 	methodInvocations []pegomock.MethodInvocation
 }
 
-func (c *MockGithubPullGetter_GetPullRequest_OngoingVerification) GetCapturedArguments() (models.Repo, int) {
-	repo, pullNum := c.GetAllCapturedArguments()
-	return repo[len(repo)-1], pullNum[len(pullNum)-1]
+func (c *MockGithubPullGetter_GetPullRequest_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, int) {
+	logger, repo, pullNum := c.GetAllCapturedArguments()
+	return logger[len(logger)-1], repo[len(repo)-1], pullNum[len(pullNum)-1]
 }
 
-func (c *MockGithubPullGetter_GetPullRequest_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int) {
+func (c *MockGithubPullGetter_GetPullRequest_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []int) {
 	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
 	if len(params) > 0 {
-		_param0 = make([]models.Repo, len(c.methodInvocations))
+		_param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
 		for u, param := range params[0] {
-			_param0[u] = param.(models.Repo)
+			_param0[u] = param.(logging.SimpleLogging)
 		}
-		_param1 = make([]int, len(c.methodInvocations))
+		_param1 = make([]models.Repo, len(c.methodInvocations))
 		for u, param := range params[1] {
-			_param1[u] = param.(int)
+			_param1[u] = param.(models.Repo)
+		}
+		_param2 = make([]int, len(c.methodInvocations))
+		for u, param := range params[2] {
+			_param2[u] = param.(int)
 		}
 	}
 	return
diff --git a/server/events/mocks/mock_gitlab_merge_request_getter.go b/server/events/mocks/mock_gitlab_merge_request_getter.go
index 76cc8565b7..cdb481741d 100644
--- a/server/events/mocks/mock_gitlab_merge_request_getter.go
+++ b/server/events/mocks/mock_gitlab_merge_request_getter.go
@@ -5,6 +5,7 @@ package mocks
 
 import (
 	pegomock "github.com/petergtz/pegomock/v4"
+	logging "github.com/runatlantis/atlantis/server/logging"
 	go_gitlab "github.com/xanzy/go-gitlab"
 	"reflect"
 	"time"
@@ -25,11 +26,11 @@ func NewMockGitlabMergeRequestGetter(options ...pegomock.Option) *MockGitlabMerg
 func (mock *MockGitlabMergeRequestGetter) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh }
 func (mock *MockGitlabMergeRequestGetter) FailHandler() pegomock.FailHandler { return mock.fail }
 
-func (mock *MockGitlabMergeRequestGetter) GetMergeRequest(repoFullName string, pullNum int) (*go_gitlab.MergeRequest, error) {
+func (mock *MockGitlabMergeRequestGetter) GetMergeRequest(logger logging.SimpleLogging, repoFullName string, pullNum int) (*go_gitlab.MergeRequest, error) {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockGitlabMergeRequestGetter().")
 	}
-	params := []pegomock.Param{repoFullName, pullNum}
+	params := []pegomock.Param{logger, repoFullName, pullNum}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("GetMergeRequest", params, []reflect.Type{reflect.TypeOf((**go_gitlab.MergeRequest)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 *go_gitlab.MergeRequest
 	var ret1 error
@@ -81,8 +82,8 @@ type VerifierMockGitlabMergeRequestGetter struct {
 	timeout time.Duration
 }
 
-func (verifier *VerifierMockGitlabMergeRequestGetter) GetMergeRequest(repoFullName string, pullNum int) *MockGitlabMergeRequestGetter_GetMergeRequest_OngoingVerification {
-	params := []pegomock.Param{repoFullName, pullNum}
+func (verifier *VerifierMockGitlabMergeRequestGetter) GetMergeRequest(logger logging.SimpleLogging, repoFullName string, pullNum int) *MockGitlabMergeRequestGetter_GetMergeRequest_OngoingVerification {
+	params := []pegomock.Param{logger, repoFullName, pullNum}
 	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetMergeRequest", params, verifier.timeout)
 	return &MockGitlabMergeRequestGetter_GetMergeRequest_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
 }
@@ -92,21 +93,25 @@ type MockGitlabMergeRequestGetter_GetMergeRequest_OngoingVerification struct {
 	methodInvocations []pegomock.MethodInvocation
 }
 
-func (c *MockGitlabMergeRequestGetter_GetMergeRequest_OngoingVerification) GetCapturedArguments() (string, int) {
-	repoFullName, pullNum := c.GetAllCapturedArguments()
-	return repoFullName[len(repoFullName)-1], pullNum[len(pullNum)-1]
+func (c *MockGitlabMergeRequestGetter_GetMergeRequest_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string, int) {
+	logger, repoFullName, pullNum := c.GetAllCapturedArguments()
+	return logger[len(logger)-1], repoFullName[len(repoFullName)-1], pullNum[len(pullNum)-1]
 }
 
-func (c *MockGitlabMergeRequestGetter_GetMergeRequest_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []int) {
+func (c *MockGitlabMergeRequestGetter_GetMergeRequest_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string, _param2 []int) {
 	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
 	if len(params) > 0 {
-		_param0 = make([]string, len(c.methodInvocations))
+		_param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
 		for u, param := range params[0] {
-			_param0[u] = param.(string)
+			_param0[u] = param.(logging.SimpleLogging)
 		}
-		_param1 = make([]int, len(c.methodInvocations))
+		_param1 = make([]string, len(c.methodInvocations))
 		for u, param := range params[1] {
-			_param1[u] = param.(int)
+			_param1[u] = param.(string)
+		}
+		_param2 = make([]int, len(c.methodInvocations))
+		for u, param := range params[2] {
+			_param2[u] = param.(int)
 		}
 	}
 	return
diff --git a/server/events/mocks/mock_pull_cleaner.go b/server/events/mocks/mock_pull_cleaner.go
index 588f9b3d31..53fd1877fe 100644
--- a/server/events/mocks/mock_pull_cleaner.go
+++ b/server/events/mocks/mock_pull_cleaner.go
@@ -6,6 +6,7 @@ package mocks
 import (
 	pegomock "github.com/petergtz/pegomock/v4"
 	models "github.com/runatlantis/atlantis/server/events/models"
+	logging "github.com/runatlantis/atlantis/server/logging"
 	"reflect"
 	"time"
 )
@@ -25,11 +26,11 @@ func NewMockPullCleaner(options ...pegomock.Option) *MockPullCleaner {
 func (mock *MockPullCleaner) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh }
 func (mock *MockPullCleaner) FailHandler() pegomock.FailHandler { return mock.fail }
 
-func (mock *MockPullCleaner) CleanUpPull(repo models.Repo, pull models.PullRequest) error {
+func (mock *MockPullCleaner) CleanUpPull(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) error {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockPullCleaner().")
 	}
-	params := []pegomock.Param{repo, pull}
+	params := []pegomock.Param{logger, repo, pull}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("CleanUpPull", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 error
 	if len(result) != 0 {
@@ -77,8 +78,8 @@ type VerifierMockPullCleaner struct {
 	timeout time.Duration
 }
 
-func (verifier *VerifierMockPullCleaner) CleanUpPull(repo models.Repo, pull models.PullRequest) *MockPullCleaner_CleanUpPull_OngoingVerification {
-	params := []pegomock.Param{repo, pull}
+func (verifier *VerifierMockPullCleaner) CleanUpPull(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) *MockPullCleaner_CleanUpPull_OngoingVerification {
+	params := []pegomock.Param{logger, repo, pull}
 	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CleanUpPull", params, verifier.timeout)
 	return &MockPullCleaner_CleanUpPull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
 }
@@ -88,21 +89,25 @@ type MockPullCleaner_CleanUpPull_OngoingVerification struct {
 	methodInvocations []pegomock.MethodInvocation
 }
 
-func (c *MockPullCleaner_CleanUpPull_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) {
-	repo, pull := c.GetAllCapturedArguments()
-	return repo[len(repo)-1], pull[len(pull)-1]
+func (c *MockPullCleaner_CleanUpPull_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest) {
+	logger, repo, pull := c.GetAllCapturedArguments()
+	return logger[len(logger)-1], repo[len(repo)-1], pull[len(pull)-1]
 }
 
-func (c *MockPullCleaner_CleanUpPull_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) {
+func (c *MockPullCleaner_CleanUpPull_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest) {
 	params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
 	if len(params) > 0 {
-		_param0 = make([]models.Repo, len(c.methodInvocations))
+		_param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
 		for u, param := range params[0] {
-			_param0[u] = param.(models.Repo)
+			_param0[u] = param.(logging.SimpleLogging)
 		}
-		_param1 = make([]models.PullRequest, len(c.methodInvocations))
+		_param1 = make([]models.Repo, len(c.methodInvocations))
 		for u, param := range params[1] {
-			_param1[u] = param.(models.PullRequest)
+			_param1[u] = param.(models.Repo)
+		}
+		_param2 = make([]models.PullRequest, len(c.methodInvocations))
+		for u, param := range params[2] {
+			_param2[u] = param.(models.PullRequest)
 		}
 	}
 	return
diff --git a/server/events/mocks/mock_working_dir.go b/server/events/mocks/mock_working_dir.go
index 55ecc1ca4c..9c162fc4a2 100644
--- a/server/events/mocks/mock_working_dir.go
+++ b/server/events/mocks/mock_working_dir.go
@@ -6,6 +6,7 @@ package mocks
 import (
 	pegomock "github.com/petergtz/pegomock/v4"
 	models "github.com/runatlantis/atlantis/server/events/models"
+	logging "github.com/runatlantis/atlantis/server/logging"
 	"reflect"
 	"time"
 )
@@ -25,11 +26,11 @@ func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir {
 func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh }
 func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail }
 
-func (mock *MockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) {
+func (mock *MockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
 	}
-	params := []pegomock.Param{headRepo, p, workspace}
+	params := []pegomock.Param{logger, headRepo, p, workspace}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 string
 	var ret1 bool
@@ -48,11 +49,11 @@ func (mock *MockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, wo
 	return ret0, ret1, ret2
 }
 
-func (mock *MockWorkingDir) Delete(r models.Repo, p models.PullRequest) error {
+func (mock *MockWorkingDir) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
 	}
-	params := []pegomock.Param{r, p}
+	params := []pegomock.Param{logger, r, p}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("Delete", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 error
 	if len(result) != 0 {
@@ -63,11 +64,11 @@ func (mock *MockWorkingDir) Delete(r models.Repo, p models.PullRequest) error {
 	return ret0
 }
 
-func (mock *MockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error {
+func (mock *MockWorkingDir) DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) error {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
 	}
-	params := []pegomock.Param{r, p, workspace}
+	params := []pegomock.Param{logger, r, p, workspace}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteForWorkspace", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 error
 	if len(result) != 0 {
@@ -78,11 +79,11 @@ func (mock *MockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullReque
 	return ret0
 }
 
-func (mock *MockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error {
+func (mock *MockWorkingDir) DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
 	}
-	params := []pegomock.Param{r, p, workspace, path, projectName}
+	params := []pegomock.Param{logger, r, p, workspace, path, projectName}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("DeletePlan", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 error
 	if len(result) != 0 {
@@ -93,11 +94,11 @@ func (mock *MockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, work
 	return ret0
 }
 
-func (mock *MockWorkingDir) GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) ([]string, error) {
+func (mock *MockWorkingDir) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) ([]string, error) {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
 	}
-	params := []pegomock.Param{r, p, workspace}
+	params := []pegomock.Param{logger, r, p, workspace}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("GetGitUntrackedFiles", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
 	var ret0 []string
 	var ret1 error
@@ -150,11 +151,11 @@ func (mock *MockWorkingDir) GetWorkingDir(r models.Repo, p models.PullRequest, w
 	return ret0, ret1
 }
 
-func (mock *MockWorkingDir) HasDiverged(cloneDir string) bool {
+func (mock *MockWorkingDir) HasDiverged(logger logging.SimpleLogging, cloneDir string) bool {
 	if mock == nil {
 		panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
 	}
-	params := []pegomock.Param{cloneDir}
+	params := []pegomock.Param{logger, cloneDir}
 	result := pegomock.GetGenericMockFrom(mock).Invoke("HasDiverged", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()})
 	var ret0 bool
 	if len(result) != 0 {
@@ -210,8 +211,8 @@ type VerifierMockWorkingDir struct {
 	timeout time.Duration
 }
 
-func (verifier *VerifierMockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_Clone_OngoingVerification {
-	params := []pegomock.Param{headRepo, p, workspace}
+func (verifier *VerifierMockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_Clone_OngoingVerification {
+	params := []pegomock.Param{logger, headRepo, p, workspace}
 	methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clone", params, verifier.timeout)
 	return &MockWorkingDir_Clone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
 }
@@ -221,32 +222,36 @@ type MockWorkingDir_Clone_OngoingVerification struct {
 	methodInvocations []pegomock.MethodInvocation
 }
 
-func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) {
-	headRepo, p, workspace := c.GetAllCapturedArguments()
-	return headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1]
+func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) {
+	logger, headRepo, p, workspace := c.GetAllCapturedArguments()
+	return logger[len(logger)-1], headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1]
 }
 
-func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2
+func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2
[]models.PullRequest, _param3 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) } } return } -func (verifier *VerifierMockWorkingDir) Delete(r models.Repo, p models.PullRequest) *MockWorkingDir_Delete_OngoingVerification { - params := []pegomock.Param{r, p} +func (verifier *VerifierMockWorkingDir) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) *MockWorkingDir_Delete_OngoingVerification { + params := []pegomock.Param{logger, r, p} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Delete", params, verifier.timeout) return &MockWorkingDir_Delete_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -256,28 +261,32 @@ type MockWorkingDir_Delete_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_Delete_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - r, p := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1] +func (c *MockWorkingDir_Delete_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest) { + logger, r, p := c.GetAllCapturedArguments() + return logger[len(logger)-1], r[len(r)-1], p[len(p)-1] } -func (c *MockWorkingDir_Delete_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { +func (c *MockWorkingDir_Delete_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) + } + _param2 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.PullRequest) } } return } -func (verifier *VerifierMockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_DeleteForWorkspace_OngoingVerification { - params := []pegomock.Param{r, p, workspace} +func (verifier *VerifierMockWorkingDir) DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p 
models.PullRequest, workspace string) *MockWorkingDir_DeleteForWorkspace_OngoingVerification { + params := []pegomock.Param{logger, r, p, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteForWorkspace", params, verifier.timeout) return &MockWorkingDir_DeleteForWorkspace_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -287,32 +296,36 @@ type MockWorkingDir_DeleteForWorkspace_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) { - r, p, workspace := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] +func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { + logger, r, p, workspace := c.GetAllCapturedArguments() + return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) { +func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) } } return } -func (verifier *VerifierMockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) *MockWorkingDir_DeletePlan_OngoingVerification { - params := []pegomock.Param{r, p, workspace, path, projectName} +func (verifier *VerifierMockWorkingDir) DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) *MockWorkingDir_DeletePlan_OngoingVerification { + params := []pegomock.Param{logger, r, p, workspace, path, projectName} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeletePlan", params, verifier.timeout) return &MockWorkingDir_DeletePlan_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -322,25 +335,25 @@ type MockWorkingDir_DeletePlan_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string, string, string) { - r, p, 
workspace, path, projectName := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1], path[len(path)-1], projectName[len(projectName)-1] +func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string, string, string) { + logger, r, p, workspace, path, projectName := c.GetAllCapturedArguments() + return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1], path[len(path)-1], projectName[len(projectName)-1] } -func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string, _param3 []string, _param4 []string) { +func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string, _param4 []string, _param5 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) } _param3 = make([]string, len(c.methodInvocations)) for u, param := range params[3] { @@ -350,12 +363,16 @@ func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments( for u, param := range params[4] { _param4[u] = param.(string) } + _param5 = make([]string, len(c.methodInvocations)) + for u, param := range params[5] { + _param5[u] = param.(string) + } } return } -func (verifier *VerifierMockWorkingDir) GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification { - params := []pegomock.Param{r, p, workspace} +func (verifier *VerifierMockWorkingDir) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification { + params := []pegomock.Param{logger, r, p, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetGitUntrackedFiles", params, verifier.timeout) return &MockWorkingDir_GetGitUntrackedFiles_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -365,25 +382,29 @@ type MockWorkingDir_GetGitUntrackedFiles_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) { - r, p, workspace := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] +func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { + logger, r, p, workspace := 
c.GetAllCapturedArguments() + return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) { +func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) } } return @@ -455,8 +476,8 @@ func (c *MockWorkingDir_GetWorkingDir_OngoingVerification) GetAllCapturedArgumen return } -func (verifier *VerifierMockWorkingDir) HasDiverged(cloneDir string) *MockWorkingDir_HasDiverged_OngoingVerification { - params := []pegomock.Param{cloneDir} +func (verifier *VerifierMockWorkingDir) HasDiverged(logger logging.SimpleLogging, cloneDir string) *MockWorkingDir_HasDiverged_OngoingVerification { + params := []pegomock.Param{logger, cloneDir} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "HasDiverged", params, verifier.timeout) return &MockWorkingDir_HasDiverged_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -466,17 +487,21 @@ type MockWorkingDir_HasDiverged_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetCapturedArguments() string { - cloneDir := c.GetAllCapturedArguments() - return cloneDir[len(cloneDir)-1] +func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string) { + logger, cloneDir := c.GetAllCapturedArguments() + return logger[len(logger)-1], cloneDir[len(cloneDir)-1] } -func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { +func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(string) + _param0[u] = param.(logging.SimpleLogging) + } + _param1 = make([]string, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(string) } } return diff --git a/server/events/models/models.go b/server/events/models/models.go index b98d93e554..f7bd4790db 100644 --- 
a/server/events/models/models.go +++ b/server/events/models/models.go @@ -185,6 +185,9 @@ type PullRequestOptions struct { // When DeleteSourceBranchOnMerge flag is set to true VCS deletes the source branch after the PR is merged // Applied by GitLab & AzureDevops DeleteSourceBranchOnMerge bool + // MergeMethod specifies the merge method for the VCS + // Implemented only for GitHub + MergeMethod string } type PullRequestState int @@ -221,6 +224,7 @@ func (p PullRequestEventType) String() string { // During an autoplan, the user will be the Atlantis API user. type User struct { Username string + Teams []string } // ProjectLock represents a lock on a project. @@ -304,6 +308,7 @@ const ( BitbucketCloud BitbucketServer AzureDevops + Gitea ) func (h VCSHostType) String() string { @@ -318,6 +323,8 @@ func (h VCSHostType) String() string { return "BitbucketServer" case AzureDevops: return "AzureDevops" + case Gitea: + return "Gitea" } return "" } @@ -334,6 +341,8 @@ func NewVCSHostType(t string) (VCSHostType, error) { return BitbucketServer, nil case "AzureDevops": return AzureDevops, nil + case "Gitea": + return Gitea, nil } return -1, fmt.Errorf("%q is not a valid type", t) @@ -610,6 +619,56 @@ func (p ProjectPlanStatus) String() string { } } +// TeamAllowlistCheckerContext defines the context for a TeamAllowlistChecker to verify +// command permissions. +type TeamAllowlistCheckerContext struct { + // BaseRepo is the repository that the pull request will be merged into. + BaseRepo Repo + + // The name of the command that is being executed, i.e. 'plan', 'apply' etc. + CommandName string + + // EscapedCommentArgs are the extra arguments that were added to the atlantis + // command, ex. atlantis plan -- -target=resource. We then escape them + // by adding a \ before each character so that they can be used within + // sh -c safely, i.e. sh -c "terraform plan $(touch bad)". + EscapedCommentArgs []string + + // HeadRepo is the repository that is getting merged into the BaseRepo. + // If the pull request branch is from the same repository then HeadRepo will + // be the same as BaseRepo. + HeadRepo Repo + + // Log is a logger that's been set up for this context. + Log logging.SimpleLogging + + // Pull is the pull request we're responding to. + Pull PullRequest + + // ProjectName is the name of the project set in atlantis.yaml. If there was + // no name this will be an empty string. + ProjectName string + + // RepoDir is the absolute path to the repo root + RepoDir string + + // RepoRelDir is the directory of this project relative to the repo root. + RepoRelDir string + + // User is the user that triggered this command. + User User + + // Verbose is true when the user would like verbose output. + Verbose bool + + // Workspace is the Terraform workspace this project is in. It will always + // be set. + Workspace string + + // API is true if the plan/apply was triggered via the API endpoints + API bool +} + // WorkflowHookCommandContext defines the context for pre and post workflow_hooks that will // be executed before and after workflows. type WorkflowHookCommandContext struct { @@ -648,6 +707,8 @@ type WorkflowHookCommandContext struct { // Workspace is the Terraform workspace this project is in. It will always // be set. Workspace string + // API is true if the plan/apply was triggered via the API endpoints + API bool } // PlanSuccessStats holds stats for a plan.
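For reference, the new `Gitea` host type round-trips through `NewVCSHostType` and `String` exactly like the existing hosts. A minimal sketch of that usage (not part of the patch; it assumes only the exported `models` symbols added in the hunk above):

```go
package main

import (
	"fmt"

	"github.com/runatlantis/atlantis/server/events/models"
)

func main() {
	// "Gitea" now parses like the other supported hosts; any other
	// spelling falls through to the `%q is not a valid type` error.
	host, err := models.NewVCSHostType("Gitea")
	if err != nil {
		fmt.Println("unsupported VCS host:", err)
		return
	}
	fmt.Println(host == models.Gitea) // true
	fmt.Println(host.String())        // Gitea
}
```

Because `NewVCSHostType` switches on the same canonical strings that `String` emits, the two functions stay inverses of each other.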
diff --git a/server/events/modules_test.go b/server/events/modules_test.go index 3f7770c033..e5cc9798c6 100644 --- a/server/events/modules_test.go +++ b/server/events/modules_test.go @@ -8,6 +8,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) //go:embed testdata/fs @@ -20,9 +21,9 @@ func Test_findModuleDependants(t *testing.T) { autoplanModuleDependants string } a, err := fs.Sub(repos, "testdata/fs/repoA") - assert.NoError(t, err) + require.NoError(t, err) b, err := fs.Sub(repos, "testdata/fs/repoB") - assert.NoError(t, err) + require.NoError(t, err) tests := []struct { name string diff --git a/server/events/pending_plan_finder.go b/server/events/pending_plan_finder.go index 1a621fb130..9a26866b1a 100644 --- a/server/events/pending_plan_finder.go +++ b/server/events/pending_plan_finder.go @@ -8,6 +8,7 @@ import ( "github.com/pkg/errors" "github.com/runatlantis/atlantis/server/core/runtime" + "github.com/runatlantis/atlantis/server/utils" ) //go:generate pegomock generate --package mocks -o mocks/mock_pending_plan_finder.go PendingPlanFinder @@ -58,8 +59,8 @@ func (p *DefaultPendingPlanFinder) findWithAbsPaths(pullDir string) ([]PendingPl lsCmd.Dir = repoDir lsOut, err := lsCmd.CombinedOutput() if err != nil { - return nil, nil, errors.Wrapf(err, "running git ls-files . "+ "--others: %s", string(lsOut)) + return nil, nil, errors.Wrapf(err, "running 'git ls-files . --others' in '%s' directory: %s", + repoDir, string(lsOut)) } for _, file := range strings.Split(string(lsOut), "\n") { if filepath.Ext(file) == ".tfplan" { @@ -92,7 +93,7 @@ func (p *DefaultPendingPlanFinder) DeletePlans(pullDir string) error { return err } for _, path := range absPaths { - if err := os.Remove(path); err != nil { + if err := utils.RemoveIgnoreNonExistent(path); err != nil { return errors.Wrapf(err, "delete plan at %s", path) } } diff --git a/server/events/pending_plan_finder_test.go b/server/events/pending_plan_finder_test.go index cecc8c9bf2..7319e71f1a 100644 --- a/server/events/pending_plan_finder_test.go +++ b/server/events/pending_plan_finder_test.go @@ -1,6 +1,7 @@ package events_test import ( + "fmt" "os" "os/exec" "path/filepath" @@ -18,6 +19,28 @@ func TestPendingPlanFinder_FindNoDir(t *testing.T) { ErrEquals(t, "open /doesntexist: no such file or directory", err) } +// If one of the dirs in the PR dir is not a git dir then Find should return an error. +func TestPendingPlanFinder_FindIncludingNotGitDir(t *testing.T) { + gitDirName := ".default" + notGitDirName := ".terragrunt-cache" + tmpDir := DirStructure(t, map[string]interface{}{ + gitDirName: map[string]interface{}{ + "default.tfplan": nil, + }, + notGitDirName: map[string]interface{}{ + "some_file.tfplan": nil, + }, + }) + // Initialize git in the 'default' directory + gitDir := filepath.Join(tmpDir, gitDirName) + runCmd(t, gitDir, "git", "init") + pf := &events.DefaultPendingPlanFinder{} + + _, err := pf.Find(tmpDir) + ErrEquals(t, fmt.Sprintf("running 'git ls-files . --others' in '%s/%s' directory: fatal: "+ "not a git repository (or any of the parent directories): .git\n: exit status 128", tmpDir, notGitDirName), err) +} + // Test different directory structures. func TestPendingPlanFinder_Find(t *testing.T) { cases := []struct { diff --git a/server/events/plan_command_runner.go b/server/events/plan_command_runner.go index 044a594233..c2b6b7a107 100644 --- a/server/events/plan_command_runner.go +++ b/server/events/plan_command_runner.go @@ -76,6 +76,7 @@ type PlanCommandRunner struct { // a plan. 
DiscardApprovalOnPlan bool pullReqStatusFetcher vcs.PullReqStatusFetcher + SilencePRComments []string } func (p *PlanCommandRunner) runAutoplan(ctx *command.Context) { @@ -84,7 +85,7 @@ func (p *PlanCommandRunner) runAutoplan(ctx *command.Context) { projectCmds, err := p.prjCmdBuilder.BuildAutoplanCommands(ctx) if err != nil { - if statusErr := p.commitStatusUpdater.UpdateCombined(baseRepo, pull, models.FailedCommitStatus, command.Plan); statusErr != nil { + if statusErr := p.commitStatusUpdater.UpdateCombined(ctx.Log, baseRepo, pull, models.FailedCommitStatus, command.Plan); statusErr != nil { ctx.Log.Warn("unable to update commit status: %s", statusErr) } p.pullUpdater.updatePull(ctx, AutoplanCommand{}, command.Result{Error: err}) @@ -100,13 +101,13 @@ func (p *PlanCommandRunner) runAutoplan(ctx *command.Context) { // with 0/0 projects planned/policy_checked/applied successfully because some users require // the Atlantis status to be passing for all pull requests. ctx.Log.Debug("setting VCS status to success with no projects found") - if err := p.commitStatusUpdater.UpdateCombinedCount(baseRepo, pull, models.SuccessCommitStatus, command.Plan, 0, 0); err != nil { + if err := p.commitStatusUpdater.UpdateCombinedCount(ctx.Log, baseRepo, pull, models.SuccessCommitStatus, command.Plan, 0, 0); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } - if err := p.commitStatusUpdater.UpdateCombinedCount(baseRepo, pull, models.SuccessCommitStatus, command.PolicyCheck, 0, 0); err != nil { + if err := p.commitStatusUpdater.UpdateCombinedCount(ctx.Log, baseRepo, pull, models.SuccessCommitStatus, command.PolicyCheck, 0, 0); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } - if err := p.commitStatusUpdater.UpdateCombinedCount(baseRepo, pull, models.SuccessCommitStatus, command.Apply, 0, 0); err != nil { + if err := p.commitStatusUpdater.UpdateCombinedCount(ctx.Log, baseRepo, pull, models.SuccessCommitStatus, command.Apply, 0, 0); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } } @@ -114,7 +115,7 @@ func (p *PlanCommandRunner) runAutoplan(ctx *command.Context) { } // At this point we are sure Atlantis has work to do, so set commit status to pending - if err := p.commitStatusUpdater.UpdateCombined(ctx.Pull.BaseRepo, ctx.Pull, models.PendingCommitStatus, command.Plan); err != nil { + if err := p.commitStatusUpdater.UpdateCombined(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, models.PendingCommitStatus, command.Plan); err != nil { ctx.Log.Warn("unable to update plan commit status: %s", err) } @@ -172,7 +173,7 @@ func (p *PlanCommandRunner) run(ctx *command.Context, cmd *CommentCommand) { baseRepo := ctx.Pull.BaseRepo pull := ctx.Pull - ctx.PullRequestStatus, err = p.pullReqStatusFetcher.FetchPullStatus(pull) + ctx.PullRequestStatus, err = p.pullReqStatusFetcher.FetchPullStatus(ctx.Log, pull) if err != nil { // On error we continue the request with mergeable assumed false. 
// We want to continue because not all apply's will need this status, @@ -187,13 +188,13 @@ func (p *PlanCommandRunner) run(ctx *command.Context, cmd *CommentCommand) { } } - if err = p.commitStatusUpdater.UpdateCombined(baseRepo, pull, models.PendingCommitStatus, command.Plan); err != nil { + if err = p.commitStatusUpdater.UpdateCombined(ctx.Log, baseRepo, pull, models.PendingCommitStatus, command.Plan); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } projectCmds, err := p.prjCmdBuilder.BuildPlanCommands(ctx, cmd) if err != nil { - if statusErr := p.commitStatusUpdater.UpdateCombined(ctx.Pull.BaseRepo, ctx.Pull, models.FailedCommitStatus, command.Plan); statusErr != nil { + if statusErr := p.commitStatusUpdater.UpdateCombined(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, models.FailedCommitStatus, command.Plan); statusErr != nil { ctx.Log.Warn("unable to update commit status: %s", statusErr) } p.pullUpdater.updatePull(ctx, cmd, command.Result{Error: err}) @@ -213,7 +214,7 @@ func (p *PlanCommandRunner) run(ctx *command.Context, cmd *CommentCommand) { if pullStatus == nil { // default to 0/0 ctx.Log.Debug("setting VCS status to 0/0 success as no previous state was found") - if err := p.commitStatusUpdater.UpdateCombinedCount(baseRepo, pull, models.SuccessCommitStatus, command.Plan, 0, 0); err != nil { + if err := p.commitStatusUpdater.UpdateCombinedCount(ctx.Log, baseRepo, pull, models.SuccessCommitStatus, command.Plan, 0, 0); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } return @@ -226,13 +227,13 @@ func (p *PlanCommandRunner) run(ctx *command.Context, cmd *CommentCommand) { // the Atlantis status to be passing for all pull requests. // Does not apply to skipped runs for specific projects ctx.Log.Debug("setting VCS status to success with no projects found") - if err := p.commitStatusUpdater.UpdateCombinedCount(baseRepo, pull, models.SuccessCommitStatus, command.Plan, 0, 0); err != nil { + if err := p.commitStatusUpdater.UpdateCombinedCount(ctx.Log, baseRepo, pull, models.SuccessCommitStatus, command.Plan, 0, 0); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } - if err := p.commitStatusUpdater.UpdateCombinedCount(baseRepo, pull, models.SuccessCommitStatus, command.PolicyCheck, 0, 0); err != nil { + if err := p.commitStatusUpdater.UpdateCombinedCount(ctx.Log, baseRepo, pull, models.SuccessCommitStatus, command.PolicyCheck, 0, 0); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } - if err := p.commitStatusUpdater.UpdateCombinedCount(baseRepo, pull, models.SuccessCommitStatus, command.Apply, 0, 0); err != nil { + if err := p.commitStatusUpdater.UpdateCombinedCount(ctx.Log, baseRepo, pull, models.SuccessCommitStatus, command.Apply, 0, 0); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } } @@ -293,7 +294,7 @@ func (p *PlanCommandRunner) run(ctx *command.Context, cmd *CommentCommand) { // with 0/0 projects planned/policy_checked/applied successfully because some users require // the Atlantis status to be passing for all pull requests. 
ctx.Log.Debug("setting VCS status to success with no projects found") - if err := p.commitStatusUpdater.UpdateCombinedCount(baseRepo, pull, models.SuccessCommitStatus, command.PolicyCheck, 0, 0); err != nil { + if err := p.commitStatusUpdater.UpdateCombinedCount(ctx.Log, baseRepo, pull, models.SuccessCommitStatus, command.PolicyCheck, 0, 0); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } } @@ -335,6 +336,7 @@ func (p *PlanCommandRunner) updateCommitStatus(ctx *command.Context, pullStatus } if err := p.commitStatusUpdater.UpdateCombinedCount( + ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, status, diff --git a/server/events/plan_command_runner_test.go b/server/events/plan_command_runner_test.go index 6ccac2f2e8..6944c28776 100644 --- a/server/events/plan_command_runner_test.go +++ b/server/events/plan_command_runner_test.go @@ -4,7 +4,7 @@ import ( "errors" "testing" - "github.com/google/go-github/v58/github" + "github.com/google/go-github/v65/github" . "github.com/petergtz/pegomock/v4" "github.com/runatlantis/atlantis/server/core/db" "github.com/runatlantis/atlantis/server/events" @@ -128,9 +128,11 @@ func TestPlanCommandRunner_IsSilenced(t *testing.T) { timesComment = 0 } - vcsClient.VerifyWasCalled(Times(timesComment)).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()) + vcsClient.VerifyWasCalled(Times(timesComment)).CreateComment( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) if c.ExpVCSStatusSet { commitUpdater.VerifyWasCalledOnce().UpdateCombinedCount( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Eq[models.CommitStatus](models.SuccessCommitStatus), @@ -140,6 +142,7 @@ func TestPlanCommandRunner_IsSilenced(t *testing.T) { ) } else { commitUpdater.VerifyWasCalled(Never()).UpdateCombinedCount( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Any[models.CommitStatus](), @@ -484,8 +487,8 @@ func TestPlanCommandRunner_ExecutionOrder(t *testing.T) { Trigger: command.CommentTrigger, } - When(githubGetter.GetPullRequest(testdata.GithubRepo, testdata.Pull.Num)).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) When(projectCommandBuilder.BuildPlanCommands(ctx, cmd)).ThenReturn(c.ProjectContexts, nil) // When(projectCommandBuilder.BuildPlanCommands(ctx, cmd)).Then(func(args []Param) ReturnValues { @@ -502,7 +505,7 @@ func TestPlanCommandRunner_ExecutionOrder(t *testing.T) { } vcsClient.VerifyWasCalledOnce().CreateComment( - Any[models.Repo](), Eq(modelPull.Num), Any[string](), Eq("plan"), + Any[logging.SimpleLogging](), Any[models.Repo](), Eq(modelPull.Num), Any[string](), Eq("plan"), ) }) } @@ -739,7 +742,7 @@ func TestPlanCommandRunner_AtlantisApplyStatus(t *testing.T) { planCommandRunner.Run(ctx, cmd) - vcsClient.VerifyWasCalledOnce().CreateComment(Any[models.Repo](), AnyInt(), AnyString(), AnyString()) + vcsClient.VerifyWasCalledOnce().CreateComment(Any[logging.SimpleLogging](), Any[models.Repo](), AnyInt(), AnyString(), AnyString()) ExpCommitStatus := models.SuccessCommitStatus if c.ExpVCSApplyStatusSucc != c.ExpVCSApplyStatusTotal { @@ -747,6 +750,7 @@ func 
TestPlanCommandRunner_AtlantisApplyStatus(t *testing.T) { } if c.DoNotUpdateApply { commitUpdater.VerifyWasCalled(Never()).UpdateCombinedCount( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Any[models.CommitStatus](), @@ -756,6 +760,7 @@ func TestPlanCommandRunner_AtlantisApplyStatus(t *testing.T) { ) } else { commitUpdater.VerifyWasCalledOnce().UpdateCombinedCount( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Eq[models.CommitStatus](ExpCommitStatus), diff --git a/server/events/policy_check_command_runner.go b/server/events/policy_check_command_runner.go index c0283aae3e..2f76237f4d 100644 --- a/server/events/policy_check_command_runner.go +++ b/server/events/policy_check_command_runner.go @@ -45,7 +45,7 @@ func (p *PolicyCheckCommandRunner) Run(ctx *command.Context, cmds []command.Proj // with 0/0 projects policy_checked successfully because some users require // the Atlantis status to be passing for all pull requests. ctx.Log.Debug("setting VCS status to success with no projects found") - if err := p.commitStatusUpdater.UpdateCombinedCount(ctx.Pull.BaseRepo, ctx.Pull, models.SuccessCommitStatus, command.PolicyCheck, 0, 0); err != nil { + if err := p.commitStatusUpdater.UpdateCombinedCount(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, models.SuccessCommitStatus, command.PolicyCheck, 0, 0); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } } @@ -53,7 +53,7 @@ func (p *PolicyCheckCommandRunner) Run(ctx *command.Context, cmds []command.Proj } // So set policy_check commit status to pending - if err := p.commitStatusUpdater.UpdateCombined(ctx.Pull.BaseRepo, ctx.Pull, models.PendingCommitStatus, command.PolicyCheck); err != nil { + if err := p.commitStatusUpdater.UpdateCombined(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, models.PendingCommitStatus, command.PolicyCheck); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } @@ -90,7 +90,7 @@ func (p *PolicyCheckCommandRunner) updateCommitStatus(ctx *command.Context, pull status = models.FailedCommitStatus } - if err := p.commitStatusUpdater.UpdateCombinedCount(ctx.Pull.BaseRepo, ctx.Pull, status, command.PolicyCheck, numSuccess, len(pullStatus.Projects)); err != nil { + if err := p.commitStatusUpdater.UpdateCombinedCount(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, status, command.PolicyCheck, numSuccess, len(pullStatus.Projects)); err != nil { ctx.Log.Warn("unable to update commit status: %s", err) } } diff --git a/server/events/post_workflow_hooks_command_runner.go b/server/events/post_workflow_hooks_command_runner.go index 4a7a587292..f5fe0c5245 100644 --- a/server/events/post_workflow_hooks_command_runner.go +++ b/server/events/post_workflow_hooks_command_runner.go @@ -37,18 +37,10 @@ type DefaultPostWorkflowHooksCommandRunner struct { } // RunPostHooks runs post_workflow_hooks after a plan/apply has completed -func (w *DefaultPostWorkflowHooksCommandRunner) RunPostHooks( - ctx *command.Context, cmd *CommentCommand, -) error { - pull := ctx.Pull - baseRepo := pull.BaseRepo - headRepo := ctx.HeadRepo - user := ctx.User - log := ctx.Log - +func (w *DefaultPostWorkflowHooksCommandRunner) RunPostHooks(ctx *command.Context, cmd *CommentCommand) error { postWorkflowHooks := make([]*valid.WorkflowHook, 0) for _, repo := range w.GlobalCfg.Repos { - if repo.IDMatches(baseRepo.ID()) && repo.BranchMatches(pull.BaseBranch) && len(repo.PostWorkflowHooks) > 0 { + if repo.IDMatches(ctx.Pull.BaseRepo.ID()) && repo.BranchMatches(ctx.Pull.BaseBranch) && 
len(repo.PostWorkflowHooks) > 0 { postWorkflowHooks = append(postWorkflowHooks, repo.PostWorkflowHooks...) } } @@ -58,16 +50,16 @@ func (w *DefaultPostWorkflowHooksCommandRunner) RunPostHooks( return nil } - log.Debug("post-hooks configured, running...") + ctx.Log.Debug("post-hooks configured, running...") - unlockFn, err := w.WorkingDirLocker.TryLock(baseRepo.FullName, pull.Num, DefaultWorkspace, DefaultRepoRelDir) + unlockFn, err := w.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, DefaultWorkspace, DefaultRepoRelDir) if err != nil { return err } - log.Debug("got workspace lock") + ctx.Log.Debug("got workspace lock") defer unlockFn() - repoDir, _, err := w.WorkingDir.Clone(headRepo, pull, DefaultWorkspace) + repoDir, _, err := w.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) if err != nil { return err } @@ -79,14 +71,15 @@ func (w *DefaultPostWorkflowHooksCommandRunner) RunPostHooks( err = w.runHooks( models.WorkflowHookCommandContext{ - BaseRepo: baseRepo, - HeadRepo: headRepo, - Log: log, - Pull: pull, - User: user, + BaseRepo: ctx.Pull.BaseRepo, + HeadRepo: ctx.HeadRepo, + Log: ctx.Log, + Pull: ctx.Pull, + User: ctx.User, Verbose: false, EscapedCommentArgs: escapedArgs, CommandName: cmd.Name.String(), + API: ctx.API, }, postWorkflowHooks, repoDir) @@ -123,33 +116,33 @@ func (w *DefaultPostWorkflowHooksCommandRunner) runHooks( ctx.HookID = uuid.NewString() shell := hook.Shell if shell == "" { - ctx.Log.Debug("Setting shell to default: %q", shell) + ctx.Log.Debug("Setting shell to default: '%s'", shell) shell = "sh" } shellArgs := hook.ShellArgs if shellArgs == "" { - ctx.Log.Debug("Setting shellArgs to default: %q", shellArgs) + ctx.Log.Debug("Setting shellArgs to default: '%s'", shellArgs) shellArgs = "-c" } url, err := w.Router.GenerateProjectWorkflowHookURL(ctx.HookID) - if err != nil { + if err != nil && !ctx.API { return err } - if err := w.CommitStatusUpdater.UpdatePostWorkflowHook(ctx.Pull, models.PendingCommitStatus, ctx.HookDescription, "", url); err != nil { + if err := w.CommitStatusUpdater.UpdatePostWorkflowHook(ctx.Log, ctx.Pull, models.PendingCommitStatus, ctx.HookDescription, "", url); err != nil { ctx.Log.Warn("unable to update post workflow hook status: %s", err) } _, runtimeDesc, err := w.PostWorkflowHookRunner.Run(ctx, hook.RunCommand, shell, shellArgs, repoDir) if err != nil { - if err := w.CommitStatusUpdater.UpdatePostWorkflowHook(ctx.Pull, models.FailedCommitStatus, ctx.HookDescription, runtimeDesc, url); err != nil { + if err := w.CommitStatusUpdater.UpdatePostWorkflowHook(ctx.Log, ctx.Pull, models.FailedCommitStatus, ctx.HookDescription, runtimeDesc, url); err != nil { ctx.Log.Warn("unable to update post workflow hook status: %s", err) } return err } - if err := w.CommitStatusUpdater.UpdatePostWorkflowHook(ctx.Pull, models.SuccessCommitStatus, ctx.HookDescription, runtimeDesc, url); err != nil { + if err := w.CommitStatusUpdater.UpdatePostWorkflowHook(ctx.Log, ctx.Pull, models.SuccessCommitStatus, ctx.HookDescription, runtimeDesc, url); err != nil { ctx.Log.Warn("unable to update post workflow hook status: %s", err) } } diff --git a/server/events/post_workflow_hooks_command_runner_test.go b/server/events/post_workflow_hooks_command_runner_test.go index 38cd5ee9ec..29996d8028 100644 --- a/server/events/post_workflow_hooks_command_runner_test.go +++ b/server/events/post_workflow_hooks_command_runner_test.go @@ -140,8 +140,10 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg - 
When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -180,7 +182,8 @@ func TestRunPostHooks_Clone(t *testing.T) { whPostWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) postWhWorkingDirLocker.VerifyWasCalled(Never()).TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, "path") - postWhWorkingDir.VerifyWasCalled(Never()).Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace) + postWhWorkingDir.VerifyWasCalled(Never()).Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace)) }) t.Run("error locking work dir", func(t *testing.T) { postWorkflowHooksSetup(t) @@ -198,12 +201,14 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg - When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(func() {}, errors.New("some error")) + When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(func() {}, errors.New("some error")) err := postWh.RunPostHooks(ctx, planCmd) Assert(t, err != nil, "error not nil") - postWhWorkingDir.VerifyWasCalled(Never()).Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace) + postWhWorkingDir.VerifyWasCalled(Never()).Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace)) whPostWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) }) @@ -229,8 +234,10 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg - When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, errors.New("some error")) + When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, errors.New("some error")) err := postWh.RunPostHooks(ctx, planCmd) @@ -262,8 +269,10 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg - When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, 
events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, errors.New("some error")) @@ -302,8 +311,10 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg - When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -336,8 +347,10 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg - When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShell.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -370,8 +383,10 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg - When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -404,16 +419,19 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg - When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, 
events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), - Eq(testHookWithShellandShellArgs.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) + When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShellandShellArgs.RunCommand), + Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) err := postWh.RunPostHooks(ctx, planCmd) Ok(t, err) whPostWorkflowHookRunner.VerifyWasCalledOnce().Run(Any[models.WorkflowHookCommandContext](), - Eq(testHookWithShellandShellArgs.RunCommand), Eq(testHookWithShellandShellArgs.Shell), Eq(testHookWithShellandShellArgs.ShellArgs), Eq(repoDir)) + Eq(testHookWithShellandShellArgs.RunCommand), Eq(testHookWithShellandShellArgs.Shell), + Eq(testHookWithShellandShellArgs.ShellArgs), Eq(repoDir)) Assert(t, *unlockCalled == true, "unlock function called") }) @@ -438,8 +456,10 @@ func TestRunPostHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -472,10 +492,12 @@ func TestRunPostHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), - Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand), + Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) err := preWh.RunPreHooks(ctx, applyCmd) @@ -506,10 +528,12 @@ func TestRunPostHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, 
events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), - Eq(testHookWithPlanApplyCommands.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanApplyCommands.RunCommand), + Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) err := preWh.RunPreHooks(ctx, planCmd) diff --git a/server/events/pre_workflow_hooks_command_runner.go b/server/events/pre_workflow_hooks_command_runner.go index e73f0c5037..70462765a3 100644 --- a/server/events/pre_workflow_hooks_command_runner.go +++ b/server/events/pre_workflow_hooks_command_runner.go @@ -38,15 +38,9 @@ type DefaultPreWorkflowHooksCommandRunner struct { // RunPreHooks runs pre_workflow_hooks when PR is opened or updated. func (w *DefaultPreWorkflowHooksCommandRunner) RunPreHooks(ctx *command.Context, cmd *CommentCommand) error { - pull := ctx.Pull - baseRepo := pull.BaseRepo - headRepo := ctx.HeadRepo - user := ctx.User - log := ctx.Log - preWorkflowHooks := make([]*valid.WorkflowHook, 0) for _, repo := range w.GlobalCfg.Repos { - if repo.IDMatches(baseRepo.ID()) && len(repo.PreWorkflowHooks) > 0 { + if repo.IDMatches(ctx.Pull.BaseRepo.ID()) && len(repo.PreWorkflowHooks) > 0 { preWorkflowHooks = append(preWorkflowHooks, repo.PreWorkflowHooks...) 
} } @@ -56,16 +50,16 @@ func (w *DefaultPreWorkflowHooksCommandRunner) RunPreHooks(ctx *command.Context, return nil } - log.Debug("pre-hooks configured, running...") + ctx.Log.Debug("pre-hooks configured, running...") - unlockFn, err := w.WorkingDirLocker.TryLock(baseRepo.FullName, pull.Num, DefaultWorkspace, DefaultRepoRelDir) + unlockFn, err := w.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, DefaultWorkspace, DefaultRepoRelDir) if err != nil { return err } - log.Debug("got workspace lock") + ctx.Log.Debug("got workspace lock") defer unlockFn() - repoDir, _, err := w.WorkingDir.Clone(headRepo, pull, DefaultWorkspace) + repoDir, _, err := w.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) if err != nil { return err } @@ -78,25 +72,26 @@ func (w *DefaultPreWorkflowHooksCommandRunner) RunPreHooks(ctx *command.Context, // Update the plan or apply commit status to pending whilst the pre workflow hook is running switch cmd.Name { case command.Plan: - if err := w.CommitStatusUpdater.UpdateCombined(ctx.Pull.BaseRepo, ctx.Pull, models.PendingCommitStatus, command.Plan); err != nil { + if err := w.CommitStatusUpdater.UpdateCombined(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, models.PendingCommitStatus, command.Plan); err != nil { ctx.Log.Warn("unable to update plan commit status: %s", err) } case command.Apply: - if err := w.CommitStatusUpdater.UpdateCombined(ctx.Pull.BaseRepo, ctx.Pull, models.PendingCommitStatus, command.Apply); err != nil { + if err := w.CommitStatusUpdater.UpdateCombined(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, models.PendingCommitStatus, command.Apply); err != nil { ctx.Log.Warn("unable to update apply commit status: %s", err) } } err = w.runHooks( models.WorkflowHookCommandContext{ - BaseRepo: baseRepo, - HeadRepo: headRepo, - Log: log, - Pull: pull, - User: user, + BaseRepo: ctx.Pull.BaseRepo, + HeadRepo: ctx.HeadRepo, + Log: ctx.Log, + Pull: ctx.Pull, + User: ctx.User, Verbose: false, EscapedCommentArgs: escapedArgs, CommandName: cmd.Name.String(), + API: ctx.API, }, preWorkflowHooks, repoDir) @@ -132,36 +127,40 @@ func (w *DefaultPreWorkflowHooksCommandRunner) runHooks( ctx.HookID = uuid.NewString() shell := hook.Shell if shell == "" { - ctx.Log.Debug("Setting shell to default: %q", shell) + ctx.Log.Debug("Setting shell to default: '%s'", shell) shell = "sh" } shellArgs := hook.ShellArgs if shellArgs == "" { - ctx.Log.Debug("Setting shellArgs to default: %q", shellArgs) + ctx.Log.Debug("Setting shellArgs to default: '%s'", shellArgs) shellArgs = "-c" } url, err := w.Router.GenerateProjectWorkflowHookURL(ctx.HookID) - if err != nil { + if err != nil && !ctx.API { return err } - if err := w.CommitStatusUpdater.UpdatePreWorkflowHook(ctx.Pull, models.PendingCommitStatus, ctx.HookDescription, "", url); err != nil { + if err := w.CommitStatusUpdater.UpdatePreWorkflowHook(ctx.Log, ctx.Pull, models.PendingCommitStatus, ctx.HookDescription, "", url); err != nil { ctx.Log.Warn("unable to update pre workflow hook status: %s", err) - return err + if !ctx.API { + return err + } } _, runtimeDesc, err := w.PreWorkflowHookRunner.Run(ctx, hook.RunCommand, shell, shellArgs, repoDir) if err != nil { - if err := w.CommitStatusUpdater.UpdatePreWorkflowHook(ctx.Pull, models.FailedCommitStatus, ctx.HookDescription, runtimeDesc, url); err != nil { + if err := w.CommitStatusUpdater.UpdatePreWorkflowHook(ctx.Log, ctx.Pull, models.FailedCommitStatus, ctx.HookDescription, runtimeDesc, url); err != nil { ctx.Log.Warn("unable to update pre workflow hook status: %s", err) } return err } - if err := w.CommitStatusUpdater.UpdatePreWorkflowHook(ctx.Pull, models.SuccessCommitStatus, ctx.HookDescription, runtimeDesc, url); err != nil { + if err := w.CommitStatusUpdater.UpdatePreWorkflowHook(ctx.Log, ctx.Pull, models.SuccessCommitStatus, ctx.HookDescription, runtimeDesc, url); err != nil { ctx.Log.Warn("unable to update pre workflow hook status: %s", err) - return err + if !ctx.API { + return err + } } } diff --git a/server/events/pre_workflow_hooks_command_runner_test.go b/server/events/pre_workflow_hooks_command_runner_test.go index 3156797f86..191a8c27dc 100644 --- a/server/events/pre_workflow_hooks_command_runner_test.go +++ b/server/events/pre_workflow_hooks_command_runner_test.go @@ -142,8 +142,10 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -180,9 +182,11 @@ func TestRunPreHooks_Clone(t *testing.T) { Ok(t, err) - whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) + whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), + Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) preWhWorkingDirLocker.VerifyWasCalled(Never()).TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, "") - preWhWorkingDir.VerifyWasCalled(Never()).Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace) + preWhWorkingDir.VerifyWasCalled(Never()).Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace)) }) t.Run("error locking work dir", func(t *testing.T) { @@ -201,13 +205,16 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(func() {}, errors.New("some error")) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(func() {}, errors.New("some error")) err := preWh.RunPreHooks(ctx, planCmd) Assert(t, err != nil, "error not nil") - 
preWhWorkingDir.VerifyWasCalled(Never()).Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace) - whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) + preWhWorkingDir.VerifyWasCalled(Never()).Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace)) + whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), + Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) }) t.Run("error cloning", func(t *testing.T) { @@ -231,14 +238,17 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, errors.New("some error")) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, errors.New("some error")) err := preWh.RunPreHooks(ctx, planCmd) Assert(t, err != nil, "error not nil") - whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) + whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), + Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) Assert(t, *unlockCalled == true, "unlock function called") }) @@ -263,8 +273,10 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, errors.New("some error")) @@ -303,14 +315,18 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), 
Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), + Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) err := preWh.RunPreHooks(ctx, planCmd) Ok(t, err) - whPreWorkflowHookRunner.VerifyWasCalledOnce().Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) + whPreWorkflowHookRunner.VerifyWasCalledOnce().Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), + Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) Assert(t, *unlockCalled == true, "unlock function called") }) @@ -335,8 +351,10 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShell.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -369,8 +387,10 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -403,10 +423,12 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), - Eq(testHookWithShellandShellArgs.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShellandShellArgs.RunCommand), + Any[string](), Any[string](), 
Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) err := preWh.RunPreHooks(ctx, planCmd) @@ -438,10 +460,12 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), - Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand), + Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) err := preWh.RunPreHooks(ctx, planCmd) @@ -472,10 +496,12 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), - Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand), + Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) err := preWh.RunPreHooks(ctx, applyCmd) @@ -506,10 +532,12 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), - Eq(testHookWithPlanApplyCommands.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanApplyCommands.RunCommand), + Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) err := preWh.RunPreHooks(ctx, planCmd) diff --git a/server/events/project_command_builder.go 
b/server/events/project_command_builder.go index 6868635324..35e540a858 100644 --- a/server/events/project_command_builder.go +++ b/server/events/project_command_builder.go @@ -4,6 +4,7 @@ import ( "fmt" "os" "path/filepath" + "slices" "sort" "strings" @@ -36,6 +37,7 @@ const ( ) func NewInstrumentedProjectCommandBuilder( + logger logging.SimpleLogging, policyChecksSupported bool, parserValidator *config.ParserValidator, projectFinder ProjectFinder, @@ -57,7 +59,6 @@ func NewInstrumentedProjectCommandBuilder( IncludeGitUntrackedFiles bool, AutoDiscoverMode string, scope tally.Scope, - logger logging.SimpleLogging, terraformClient terraform.Client, ) *InstrumentedProjectCommandBuilder { scope = scope.SubScope("builder") @@ -89,7 +90,6 @@ func NewInstrumentedProjectCommandBuilder( IncludeGitUntrackedFiles, AutoDiscoverMode, scope, - logger, terraformClient, ), Logger: logger, @@ -119,7 +119,6 @@ func NewProjectCommandBuilder( IncludeGitUntrackedFiles bool, AutoDiscoverMode string, scope tally.Scope, - _ logging.SimpleLogging, terraformClient terraform.Client, ) *DefaultProjectCommandBuilder { return &DefaultProjectCommandBuilder{ @@ -262,7 +261,7 @@ func (p *DefaultProjectCommandBuilder) BuildAutoplanCommands(ctx *command.Contex var autoplanEnabled []command.ProjectContext for _, projCtx := range projCtxs { if !projCtx.AutoplanEnabled { - ctx.Log.Debug("ignoring project at dir %q, workspace: %q because autoplan is disabled", projCtx.RepoRelDir, projCtx.Workspace) + ctx.Log.Debug("ignoring project at dir '%s', workspace: '%s' because autoplan is disabled", projCtx.RepoRelDir, projCtx.Workspace) continue } autoplanEnabled = append(autoplanEnabled, projCtx) @@ -278,8 +277,7 @@ func (p *DefaultProjectCommandBuilder) BuildPlanCommands(ctx *command.Context, c } ctx.Log.Debug("Building plan command for specific project with directory: '%v', workspace: '%v', project: '%v'", cmd.RepoRelDir, cmd.Workspace, cmd.ProjectName) - pcc, err := p.buildProjectPlanCommand(ctx, cmd) - return pcc, err + return p.buildProjectPlanCommand(ctx, cmd) } // See ProjectCommandBuilder.BuildApplyCommands. @@ -287,24 +285,21 @@ func (p *DefaultProjectCommandBuilder) BuildApplyCommands(ctx *command.Context, if !cmd.IsForSpecificProject() { return p.buildAllProjectCommandsByPlan(ctx, cmd) } - pac, err := p.buildProjectCommand(ctx, cmd) - return pac, err + return p.buildProjectCommand(ctx, cmd) } func (p *DefaultProjectCommandBuilder) BuildApprovePoliciesCommands(ctx *command.Context, cmd *CommentCommand) ([]command.ProjectContext, error) { if !cmd.IsForSpecificProject() { return p.buildAllProjectCommandsByPlan(ctx, cmd) } - pac, err := p.buildProjectCommand(ctx, cmd) - return pac, err + return p.buildProjectCommand(ctx, cmd) } func (p *DefaultProjectCommandBuilder) BuildVersionCommands(ctx *command.Context, cmd *CommentCommand) ([]command.ProjectContext, error) { if !cmd.IsForSpecificProject() { return p.buildAllProjectCommandsByPlan(ctx, cmd) } - pac, err := p.buildProjectCommand(ctx, cmd) - return pac, err + return p.buildProjectCommand(ctx, cmd) } func (p *DefaultProjectCommandBuilder) BuildImportCommands(ctx *command.Context, cmd *CommentCommand) ([]command.ProjectContext, error) { @@ -327,14 +322,14 @@ func (p *DefaultProjectCommandBuilder) BuildStateRmCommands(ctx *command.Context // modified in this ctx. 
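// buildAllCommandsByCfg (below) now receives the request-scoped logger explicitly:
// ctx.Log is threaded into GetModifiedFiles, GetGitUntrackedFiles, GetFileContent,
// and Clone, so log lines emitted inside the VCS and working-directory layers stay
// attached to the command that triggered them.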
func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Context, cmdName command.Name, subCmdName string, commentFlags []string, verbose bool) ([]command.ProjectContext, error) { // We'll need the list of modified files. - modifiedFiles, err := p.VCSClient.GetModifiedFiles(ctx.Pull.BaseRepo, ctx.Pull) + modifiedFiles, err := p.VCSClient.GetModifiedFiles(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull) if err != nil { return nil, err } if p.IncludeGitUntrackedFiles { ctx.Log.Debug(("'include-git-untracked-files' option is set, getting untracked files")) - untrackedFiles, err := p.WorkingDir.GetGitUntrackedFiles(ctx.HeadRepo, ctx.Pull, DefaultWorkspace) + untrackedFiles, err := p.WorkingDir.GetGitUntrackedFiles(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) if err != nil { return nil, err } @@ -352,7 +347,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex if p.SkipCloneNoChanges && p.VCSClient.SupportsSingleFileDownload(ctx.Pull.BaseRepo) { repoCfgFile := p.GlobalCfg.RepoConfigFile(ctx.Pull.BaseRepo.ID()) - hasRepoCfg, repoCfgData, err := p.VCSClient.GetFileContent(ctx.Pull, repoCfgFile) + hasRepoCfg, repoCfgData, err := p.VCSClient.GetFileContent(ctx.Log, ctx.Pull, repoCfgFile) if err != nil { return nil, errors.Wrapf(err, "downloading %s", repoCfgFile) } @@ -402,7 +397,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex ctx.Log.Debug("got workspace lock") defer unlockFn() - repoDir, _, err := p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, workspace) + repoDir, _, err := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) if err != nil { return nil, err } @@ -411,7 +406,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex repoCfgFile := p.GlobalCfg.RepoConfigFile(ctx.Pull.BaseRepo.ID()) hasRepoCfg, err := p.ParserValidator.HasRepoCfg(repoDir, repoCfgFile) if err != nil { - return nil, errors.Wrapf(err, "looking for %s file in %q", repoCfgFile, repoDir) + return nil, errors.Wrapf(err, "looking for '%s' file in '%s'", repoCfgFile, repoDir) } var projCtxs []command.ProjectContext @@ -440,7 +435,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex if err != nil { ctx.Log.Warn("error(s) loading project module dependencies: %s", err) } - ctx.Log.Debug("moduleInfo for %s (matching %q) = %v", repoDir, p.AutoDetectModuleFiles, moduleInfo) + ctx.Log.Debug("moduleInfo for '%s' (matching '%s') = %v", repoDir, p.AutoDetectModuleFiles, moduleInfo) automerge := p.EnableAutoMerge parallelApply := p.EnableParallelApply @@ -467,7 +462,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex ctx.Log.Info("%d projects are to be planned based on their when_modified config", len(matchingProjects)) for _, mp := range matchingProjects { - ctx.Log.Debug("determining config for project at dir: %q workspace: %q", mp.Dir, mp.Workspace) + ctx.Log.Debug("determining config for project at dir: '%s' workspace: '%s'", mp.Dir, mp.Workspace) mergedCfg := p.GlobalCfg.MergeProjectCfg(ctx.Log, ctx.Pull.BaseRepo.ID(), mp, repoCfg) projCtxs = append(projCtxs, @@ -523,10 +518,11 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex ctx.Log.Info("automatically determined that there were %d additional projects modified in this pull request: %s", len(modifiedProjects), modifiedProjects) for _, mp := range modifiedProjects { - ctx.Log.Debug("determining config for project at dir: %q", mp.Path) - pWorkspace, err := 
p.ProjectFinder.DetermineWorkspaceFromHCL(ctx.Log, repoDir) + ctx.Log.Debug("determining config for project at dir: '%s'", mp.Path) + absProjectDir := filepath.Join(repoDir, mp.Path) + pWorkspace, err := p.ProjectFinder.DetermineWorkspaceFromHCL(ctx.Log, absProjectDir) if err != nil { - return nil, errors.Wrapf(err, "looking for Terraform Cloud workspace from configuration %s", repoDir) + return nil, errors.Wrapf(err, "looking for Terraform Cloud workspace from configuration %s", absProjectDir) } pCfg := p.GlobalCfg.DefaultProjCfg(ctx.Log, ctx.Pull.BaseRepo.ID(), mp.Path, pWorkspace) @@ -553,6 +549,30 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex return projCtxs[i].ExecutionOrderGroup < projCtxs[j].ExecutionOrderGroup }) + // Filter projects to only include ones the user is authorized for + projCtxs = slices.DeleteFunc(projCtxs, func(projCtx command.ProjectContext) bool { + if projCtx.TeamAllowlistChecker == nil || !projCtx.TeamAllowlistChecker.HasRules() { + // allowlist restriction is not enabled + return false + } + ctx := models.TeamAllowlistCheckerContext{ + BaseRepo: projCtx.BaseRepo, + CommandName: projCtx.CommandName.String(), + EscapedCommentArgs: projCtx.EscapedCommentArgs, + HeadRepo: projCtx.HeadRepo, + Log: projCtx.Log, + Pull: projCtx.Pull, + ProjectName: projCtx.ProjectName, + RepoDir: repoDir, + RepoRelDir: projCtx.RepoRelDir, + User: projCtx.User, + Verbose: projCtx.Verbose, + Workspace: projCtx.Workspace, + API: false, + } + return !projCtx.TeamAllowlistChecker.IsCommandAllowedForAnyTeam(ctx, projCtx.User.Teams, projCtx.CommandName.String()) + }) + return projCtxs, nil } @@ -574,7 +594,7 @@ func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *command.Cont defer unlockFn() ctx.Log.Debug("cloning repository") - _, _, err = p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, DefaultWorkspace) + _, _, err = p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) if err != nil { return pcc, err } @@ -588,14 +608,14 @@ func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *command.Cont if p.RestrictFileList { ctx.Log.Debug("'restrict-file-list' option is set, checking modified files") - modifiedFiles, err := p.VCSClient.GetModifiedFiles(ctx.Pull.BaseRepo, ctx.Pull) + modifiedFiles, err := p.VCSClient.GetModifiedFiles(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull) if err != nil { return nil, err } if p.IncludeGitUntrackedFiles { ctx.Log.Debug(("'include-git-untracked-files' option is set, getting untracked files")) - untrackedFiles, err := p.WorkingDir.GetGitUntrackedFiles(ctx.HeadRepo, ctx.Pull, workspace) + untrackedFiles, err := p.WorkingDir.GetGitUntrackedFiles(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) if err != nil { return nil, err } @@ -652,7 +672,7 @@ func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *command.Cont if DefaultWorkspace != workspace { ctx.Log.Debug("cloning repository with workspace %s", workspace) - _, _, err = p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, workspace) + _, _, err = p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) if err != nil { return pcc, err } @@ -682,7 +702,7 @@ func (p *DefaultProjectCommandBuilder) getCfg(ctx *command.Context, projectName repoCfgFile := p.GlobalCfg.RepoConfigFile(ctx.Pull.BaseRepo.ID()) hasRepoCfg, err := p.ParserValidator.HasRepoCfg(repoDir, repoCfgFile) if err != nil { - err = errors.Wrapf(err, "looking for %s file in %q", repoCfgFile, repoDir) + err = errors.Wrapf(err, "looking for '%s' file in '%s'", repoCfgFile, repoDir) 
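The authorization filter introduced above (and repeated nearly verbatim in buildProjectCommandCtx later in this file) relies on slices.DeleteFunc from the Go 1.21+ standard library: the predicate answers "should this element be dropped?", and the returned slice keeps only elements for which it returned false. A self-contained sketch of the idiom, with the Atlantis-specific TeamAllowlistCheckerContext elided:

	package main

	import (
		"fmt"
		"slices"
	)

	type projectCtx struct {
		Name    string
		Allowed bool // stands in for the team-allowlist check
	}

	func main() {
		ctxs := []projectCtx{{"network", true}, {"secrets", false}, {"compute", true}}
		// DeleteFunc removes every element whose predicate returns true,
		// mirroring how unauthorized projects are pruned in the diff above.
		ctxs = slices.DeleteFunc(ctxs, func(p projectCtx) bool {
			return !p.Allowed
		})
		fmt.Println(ctxs) // [{network true} {compute true}]
	}

Since the two filter blocks differ only in their surrounding function, they look like candidates for a shared helper taking the repo dir and the slice.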
return } if !hasRepoCfg { @@ -712,9 +732,9 @@ func (p *DefaultProjectCommandBuilder) getCfg(ctx *command.Context, projectName } if len(projectsCfg) == 0 { if p.SilenceNoProjects && len(repoConfig.Projects) > 0 { - ctx.Log.Debug("no project with name %q found but silencing the error", projectName) + ctx.Log.Debug("no project with name '%s' found but silencing the error", projectName) } else { - err = fmt.Errorf("no project with name %q is defined in %s", projectName, repoCfgFile) + err = fmt.Errorf("no project with name '%s' is defined in '%s'", projectName, repoCfgFile) } return } @@ -726,7 +746,7 @@ func (p *DefaultProjectCommandBuilder) getCfg(ctx *command.Context, projectName return } if len(projCfgs) > 1 { - err = fmt.Errorf("must specify project name: more than one project defined in %s matched dir: %q workspace: %q", repoCfgFile, dir, workspace) + err = fmt.Errorf("must specify project name: more than one project defined in '%s' matched dir: '%s' workspace: '%s'", repoCfgFile, dir, workspace) return } projectsCfg = projCfgs @@ -765,7 +785,7 @@ func (p *DefaultProjectCommandBuilder) buildAllProjectCommandsByPlan(ctx *comman for _, plan := range plans { commentCmds, err := p.buildProjectCommandCtx(ctx, commentCmd.CommandName(), commentCmd.SubName, plan.ProjectName, commentCmd.Flags, defaultRepoDir, plan.RepoRelDir, plan.Workspace, commentCmd.Verbose) if err != nil { - return nil, errors.Wrapf(err, "building command for dir %q", plan.RepoRelDir) + return nil, errors.Wrapf(err, "building command for dir '%s'", plan.RepoRelDir) } cmds = append(cmds, commentCmds...) } @@ -861,7 +881,7 @@ func (p *DefaultProjectCommandBuilder) buildProjectCommandCtx(ctx *command.Conte repoRelDir = projCfg.RepoRelDir workspace = projCfg.Workspace for _, mp := range matchingProjects { - ctx.Log.Debug("Merging config for project at dir: %q workspace: %q", mp.Dir, mp.Workspace) + ctx.Log.Debug("Merging config for project at dir: '%s' workspace: '%s'", mp.Dir, mp.Workspace) projCfg = p.GlobalCfg.MergeProjectCfg(ctx.Log, ctx.Pull.BaseRepo.ID(), mp, *repoCfgPtr) projCtxs = append(projCtxs, @@ -909,6 +929,30 @@ func (p *DefaultProjectCommandBuilder) buildProjectCommandCtx(ctx *command.Conte return []command.ProjectContext{}, err } + // Filter projects to only include ones the user is authorized for + projCtxs = slices.DeleteFunc(projCtxs, func(projCtx command.ProjectContext) bool { + if projCtx.TeamAllowlistChecker == nil || !projCtx.TeamAllowlistChecker.HasRules() { + // allowlist restriction is not enabled + return false + } + ctx := models.TeamAllowlistCheckerContext{ + BaseRepo: projCtx.BaseRepo, + CommandName: projCtx.CommandName.String(), + EscapedCommentArgs: projCtx.EscapedCommentArgs, + HeadRepo: projCtx.HeadRepo, + Log: projCtx.Log, + Pull: projCtx.Pull, + ProjectName: projCtx.ProjectName, + RepoDir: repoDir, + RepoRelDir: projCtx.RepoRelDir, + User: projCtx.User, + Verbose: projCtx.Verbose, + Workspace: projCtx.Workspace, + API: false, + } + return !projCtx.TeamAllowlistChecker.IsCommandAllowedForAnyTeam(ctx, projCtx.User.Teams, projCtx.CommandName.String()) + }) + return projCtxs, nil } diff --git a/server/events/project_command_builder_internal_test.go b/server/events/project_command_builder_internal_test.go index 98ded0aa9c..d020871b31 100644 --- a/server/events/project_command_builder_internal_test.go +++ b/server/events/project_command_builder_internal_test.go @@ -84,7 +84,7 @@ workflows: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: 
valid.DefaultRepoLocksMode, }, expPlanSteps: []string{"init", "plan"}, expApplySteps: []string{"apply"}, @@ -143,7 +143,7 @@ projects: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{"init", "plan"}, expApplySteps: []string{"apply"}, @@ -204,7 +204,7 @@ projects: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{"init", "plan"}, expApplySteps: []string{"apply"}, @@ -273,7 +273,7 @@ projects: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{"plan"}, expApplySteps: []string{}, @@ -429,7 +429,7 @@ workflows: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{"plan"}, expApplySteps: []string{"apply"}, @@ -492,7 +492,7 @@ projects: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{"plan"}, expApplySteps: []string{"apply"}, @@ -558,7 +558,7 @@ workflows: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{}, expApplySteps: []string{}, @@ -609,7 +609,7 @@ projects: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{"plan"}, expApplySteps: []string{"apply"}, @@ -630,9 +630,11 @@ projects: }) workingDir := NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmp, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmp, false, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) // Write and parse the global config file. globalCfgPath := filepath.Join(tmp, "global.yaml") @@ -670,7 +672,6 @@ projects: false, "auto", statsScope, - logger, terraformClient, ) @@ -692,9 +693,9 @@ projects: ErrEquals(t, c.expErr, err) return } - ctx := ctxs[0] Ok(t, err) + ctx := ctxs[0] // Construct expected steps. 
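These fixture updates track a schema change: the boolean RepoLocking project flag is replaced by a RepoLocksMode carried under projCfg.RepoLocks.Mode (see the project_command_context_builder.go hunk further down), with valid.DefaultRepoLocksMode as the fallback. Only those identifiers appear in this diff; as a hedged sketch, the new type presumably enumerates locking behaviors rather than a plain on/off:

	// Illustrative only: the concrete enum values below are assumptions, not
	// taken from this diff, which shows just RepoLocksMode, RepoLocks.Mode,
	// and valid.DefaultRepoLocksMode.
	type RepoLocksMode int

	const (
		RepoLocksDisabledMode RepoLocksMode = iota // no repo lock taken
		RepoLocksOnPlanMode                        // lock when planning (assumed default)
		RepoLocksOnApplyMode                       // lock deferred until apply
	)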
var stepNames []string @@ -823,7 +824,7 @@ projects: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{"init", "plan"}, expApplySteps: []string{"apply"}, @@ -844,9 +845,11 @@ projects: }) workingDir := NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmp, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmp, false, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) // Write and parse the global config file. globalCfgPath := filepath.Join(tmp, "global.yaml") @@ -860,7 +863,6 @@ projects: Ok(t, os.WriteFile(filepath.Join(tmp, "atlantis.yaml"), []byte(c.repoCfg), 0600)) } - logger := logging.NewNoopLogger(t) statsScope, _, _ := metrics.NewLoggingScope(logging.NewNoopLogger(t), "atlantis") terraformClient := mocks.NewMockClient() @@ -887,7 +889,6 @@ projects: false, "auto", statsScope, - logger, terraformClient, ) @@ -909,9 +910,9 @@ projects: ErrEquals(t, c.expErr, err) return } - ctx := ctxs[0] Ok(t, err) + ctx := ctxs[0] Equals(t, 2, len(ctxs)) // Construct expected steps. @@ -1004,7 +1005,7 @@ repos: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPolicyCheckSteps: []string{"show", "policy_check"}, }, @@ -1068,7 +1069,7 @@ workflows: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, PolicySetTarget: "", }, expPolicyCheckSteps: []string{"policy_check"}, @@ -1089,9 +1090,11 @@ workflows: }) workingDir := NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmp, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmp, false, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) // Write and parse the global config file. globalCfgPath := filepath.Join(tmp, "global.yaml") @@ -1133,7 +1136,6 @@ workflows: false, "auto", statsScope, - logger, terraformClient, ) @@ -1155,9 +1157,8 @@ workflows: return } - ctx := ctxs[1] - Ok(t, err) + ctx := ctxs[1] // Construct expected steps. 
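The ChangedFiles test helper introduced just below walks a fixture map and returns relative file paths, but on recursing into a nested map it passes only the immediate key as the new parent, so fixtures nested more than one directory deep would come back with truncated paths. The structures used in these tests are at most one level deep, so the helper holds; a fully recursive variant (a sketch, not part of this change, assuming path/filepath is imported) would thread the accumulated path instead:

	// Sketch: carry the accumulated parent path into the recursion so that
	// arbitrarily nested fixtures yield full relative paths.
	func ChangedFiles(dirStructure map[string]interface{}, parent string) []string {
		var files []string
		for k, v := range dirStructure {
			switch v := v.(type) {
			case map[string]interface{}:
				files = append(files, ChangedFiles(v, filepath.Join(parent, k))...)
			default:
				files = append(files, filepath.Join(parent, k))
			}
		}
		return files
	}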
var stepNames []string @@ -1243,9 +1244,11 @@ projects: }) workingDir := NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmp, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmp, false, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) // Write and parse the global config file. globalCfgPath := filepath.Join(tmp, "global.yaml") @@ -1285,7 +1288,6 @@ projects: false, "auto", statsScope, - logger, terraformClient, ) @@ -1382,9 +1384,11 @@ projects: }) workingDir := NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmp, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmp, false, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.modifiedFiles, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn(c.modifiedFiles, nil) // Write and parse the global config file. globalCfgPath := filepath.Join(tmp, "global.yaml") @@ -1426,7 +1430,6 @@ projects: false, "auto", statsScope, - logger, terraformClient, ) diff --git a/server/events/project_command_builder_test.go b/server/events/project_command_builder_test.go index 1937f990f0..7560b5d6de 100644 --- a/server/events/project_command_builder_test.go +++ b/server/events/project_command_builder_test.go @@ -3,6 +3,7 @@ package events_test import ( "os" "path/filepath" + "sort" "strings" "testing" @@ -48,6 +49,19 @@ var defaultUserConfig = struct { AutoDiscoverMode: "auto", } +func ChangedFiles(dirStructure map[string]interface{}, parent string) []string { + var files []string + for k, v := range dirStructure { + switch v := v.(type) { + case map[string]interface{}: + files = append(files, ChangedFiles(v, k)...) + default: + files = append(files, filepath.Join(parent, k)) + } + } + return files +} + func TestDefaultProjectCommandBuilder_BuildAutoplanCommands(t *testing.T) { // expCtxFields define the ctx fields we're going to assert on. // Since we're focused on autoplanning here, we don't validate all the @@ -57,11 +71,16 @@ func TestDefaultProjectCommandBuilder_BuildAutoplanCommands(t *testing.T) { RepoRelDir string Workspace string } + defaultTestDirStructure := map[string]interface{}{ + "main.tf": nil, + } + cases := []struct { - Description string - AtlantisYAML string - ServerSideYAML string - exp []expCtxFields + Description string + AtlantisYAML string + ServerSideYAML string + TestDirStructure map[string]interface{} + exp []expCtxFields }{ { Description: "simple atlantis.yaml", @@ -70,6 +89,7 @@ version: 3 projects: - dir: . `, + TestDirStructure: defaultTestDirStructure, exp: []expCtxFields{ { ProjectName: "", @@ -94,6 +114,7 @@ projects: name: myname workspace: myworkspace2 `, + TestDirStructure: defaultTestDirStructure, exp: []expCtxFields{ { ProjectName: "", @@ -122,6 +143,7 @@ projects: - dir: . 
workspace: myworkspace2 `, + TestDirStructure: defaultTestDirStructure, exp: []expCtxFields{ { ProjectName: "", @@ -142,7 +164,68 @@ version: 3 projects: - dir: mydir `, - exp: nil, + TestDirStructure: defaultTestDirStructure, + exp: nil, + }, + { + Description: "workspaces from subdirectories detected", + TestDirStructure: map[string]interface{}{ + "work": map[string]interface{}{ + "main.tf": ` +terraform { + cloud { + organization = "atlantis-test" + workspaces { + name = "test-workspace1" + } + } +}`, + }, + "test": map[string]interface{}{ + "main.tf": ` +terraform { + cloud { + organization = "atlantis-test" + workspaces { + name = "test-workspace12" + } + } +}`, + }, + }, + exp: []expCtxFields{ + { + ProjectName: "", + RepoRelDir: "test", + Workspace: "test-workspace12", + }, + { + ProjectName: "", + RepoRelDir: "work", + Workspace: "test-workspace1", + }, + }, + }, + { + Description: "workspaces in parent directory are detected", + TestDirStructure: map[string]interface{}{ + "main.tf": ` +terraform { + cloud { + organization = "atlantis-test" + workspaces { + name = "test-workspace" + } + } +}`, + }, + exp: []expCtxFields{ + { + ProjectName: "", + RepoRelDir: ".", + Workspace: "test-workspace", + }, + }, }, } @@ -151,19 +234,17 @@ projects: userConfig := defaultUserConfig terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) for _, c := range cases { t.Run(c.Description, func(t *testing.T) { RegisterMockTestingT(t) - tmpDir := DirStructure(t, map[string]interface{}{ - "main.tf": nil, - }) - + tmpDir := DirStructure(t, c.TestDirStructure) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmpDir, false, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn(ChangedFiles(c.TestDirStructure, ""), nil) if c.AtlantisYAML != "" { err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600) Ok(t, err) @@ -193,7 +274,6 @@ projects: userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -206,6 +286,17 @@ projects: }) Ok(t, err) Equals(t, len(c.exp), len(ctxs)) + + // Sort so comparisons are deterministic + sort.Slice(ctxs, func(i, j int) bool { + if ctxs[i].ProjectName != ctxs[j].ProjectName { + return ctxs[i].ProjectName < ctxs[j].ProjectName + } + if ctxs[i].RepoRelDir != ctxs[j].RepoRelDir { + return ctxs[i].RepoRelDir < ctxs[j].RepoRelDir + } + return ctxs[i].Workspace < ctxs[j].Workspace + }) for i, actCtx := range ctxs { expCtx := c.exp[i] Equals(t, expCtx.ProjectName, actCtx.ProjectName) @@ -383,7 +474,7 @@ projects: dir: . workspace: myworkspace `, - ExpErr: "must specify project name: more than one project defined in atlantis.yaml matched dir: \".\" workspace: \"myworkspace\"", + ExpErr: "must specify project name: more than one project defined in 'atlantis.yaml' matched dir: '.' workspace: 'myworkspace'", }, { Description: "atlantis.yaml with project flag not matching", @@ -398,7 +489,7 @@ version: 3 projects: - dir: . 
`, - ExpErr: "no project with name \"notconfigured\" is defined in atlantis.yaml", + ExpErr: "no project with name 'notconfigured' is defined in 'atlantis.yaml'", }, { Description: "atlantis.yaml with project flag not matching but silenced", @@ -510,10 +601,12 @@ projects: }) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil) if c.AtlantisYAML != "" { err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600) Ok(t, err) @@ -524,7 +617,6 @@ projects: } terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, @@ -548,7 +640,6 @@ projects: userConfig.IncludeGitUntrackedFiles, c.AutoDiscoverModeUserCfg, scope, - logger, terraformClient, ) @@ -698,10 +789,12 @@ projects: tmpDir := DirStructure(t, c.DirectoryStructure) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) if c.AtlantisYAML != "" { err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600) Ok(t, err) @@ -712,7 +805,6 @@ projects: } terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, @@ -736,7 +828,6 @@ projects: userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1027,10 +1118,12 @@ projects: tmpDir := DirStructure(t, c.DirStructure) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), 
Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) if c.AtlantisYAML != "" { err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600) Ok(t, err) @@ -1041,7 +1134,6 @@ projects: } terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, @@ -1065,7 +1157,6 @@ projects: userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1141,7 +1232,6 @@ func TestDefaultProjectCommandBuilder_BuildMultiApply(t *testing.T) { scope, _, _ := metrics.NewLoggingScope(logger, "atlantis") terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, @@ -1165,7 +1255,6 @@ func TestDefaultProjectCommandBuilder_BuildMultiApply(t *testing.T) { userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1217,14 +1306,9 @@ projects: err := os.WriteFile(filepath.Join(repoDir, valid.DefaultAtlantisFile), []byte(yamlCfg), 0600) Ok(t, err) - When(workingDir.Clone( - Any[models.Repo](), - Any[models.PullRequest](), + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(repoDir, false, nil) - When(workingDir.GetWorkingDir( - Any[models.Repo](), - Any[models.PullRequest](), - Any[string]())).ThenReturn(repoDir, nil) + When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(repoDir, nil) globalCfgArgs := valid.GlobalCfgArgs{ AllowAllRepoSettings: true, @@ -1234,7 +1318,6 @@ projects: userConfig := defaultUserConfig terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, @@ -1258,7 +1341,6 @@ projects: userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1312,17 +1394,18 @@ func TestDefaultProjectCommandBuilder_EscapeArgs(t *testing.T) { }) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil) globalCfgArgs := valid.GlobalCfgArgs{ AllowAllRepoSettings: true, } terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, @@ -1346,7 +1429,6 @@ func TestDefaultProjectCommandBuilder_EscapeArgs(t *testing.T) { userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1465,18 +1547,12 @@ projects: tmpDir 
:= DirStructure(t, testCase.DirStructure) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(testCase.ModifiedFiles, nil) - + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn(testCase.ModifiedFiles, nil) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone( - Any[models.Repo](), - Any[models.PullRequest](), + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) - - When(workingDir.GetWorkingDir( - Any[models.Repo](), - Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, nil) + When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) globalCfgArgs := valid.GlobalCfgArgs{ AllowAllRepoSettings: true, @@ -1515,7 +1591,6 @@ projects: userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1589,9 +1664,11 @@ projects: for _, c := range cases { RegisterMockTestingT(t) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) + When(vcsClient.GetModifiedFiles( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) When(vcsClient.SupportsSingleFileDownload(Any[models.Repo]())).ThenReturn(true) - When(vcsClient.GetFileContent(Any[models.PullRequest](), Any[string]())).ThenReturn(true, []byte(c.AtlantisYAML), nil) + When(vcsClient.GetFileContent( + Any[logging.SimpleLogging](), Any[models.PullRequest](), Any[string]())).ThenReturn(true, []byte(c.AtlantisYAML), nil) workingDir := mocks.NewMockWorkingDir() logger := logging.NewNoopLogger(t) @@ -1601,7 +1678,6 @@ projects: } scope, _, _ := metrics.NewLoggingScope(logger, "atlantis") terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, @@ -1625,7 +1701,6 @@ projects: userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1643,7 +1718,8 @@ projects: }) Ok(t, err) Equals(t, c.ExpectedCtxs, len(actCtxs)) - workingDir.VerifyWasCalled(c.ExpectedClones).Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]()) + workingDir.VerifyWasCalled(c.ExpectedClones).Clone(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest](), Any[string]()) } } @@ -1658,9 +1734,11 @@ func TestDefaultProjectCommandBuilder_WithPolicyCheckEnabled_BuildAutoplanComman userConfig := defaultUserConfig workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmpDir, false, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil) globalCfgArgs := valid.GlobalCfgArgs{ AllowAllRepoSettings: false, @@ -1669,7 +1747,6 @@ func 
TestDefaultProjectCommandBuilder_WithPolicyCheckEnabled_BuildAutoplanComman globalCfg := valid.NewGlobalCfgFromArgs(globalCfgArgs) terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( true, @@ -1693,7 +1770,6 @@ func TestDefaultProjectCommandBuilder_WithPolicyCheckEnabled_BuildAutoplanComman userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1759,7 +1835,6 @@ func TestDefaultProjectCommandBuilder_BuildVersionCommand(t *testing.T) { AllowAllRepoSettings: false, } terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, @@ -1783,7 +1858,6 @@ func TestDefaultProjectCommandBuilder_BuildVersionCommand(t *testing.T) { userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1877,18 +1951,20 @@ func TestDefaultProjectCommandBuilder_BuildPlanCommands_Single_With_RestrictFile tmpDir := DirStructure(t, c.DirectoryStructure) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) - When(workingDir.GetGitUntrackedFiles(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(c.UntrackedFiles, nil) + When(workingDir.GetGitUntrackedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(c.UntrackedFiles, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) if c.AtlantisYAML != "" { err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600) Ok(t, err) } terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, // policyChecksSupported @@ -1912,7 +1988,6 @@ func TestDefaultProjectCommandBuilder_BuildPlanCommands_Single_With_RestrictFile userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1987,18 +2062,20 @@ func TestDefaultProjectCommandBuilder_BuildPlanCommands_with_IncludeGitUntracked tmpDir := DirStructure(t, c.DirectoryStructure) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) - When(workingDir.GetGitUntrackedFiles(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(c.UntrackedFiles, nil) + 
When(workingDir.GetGitUntrackedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(c.UntrackedFiles, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) if c.AtlantisYAML != "" { err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600) Ok(t, err) } terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, // policyChecksSupported @@ -2022,7 +2099,6 @@ func TestDefaultProjectCommandBuilder_BuildPlanCommands_with_IncludeGitUntracked userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) diff --git a/server/events/project_command_context_builder.go b/server/events/project_command_context_builder.go index 5ed6dad94c..19c1c8ff34 100644 --- a/server/events/project_command_context_builder.go +++ b/server/events/project_command_context_builder.go @@ -130,7 +130,7 @@ func (cb *DefaultProjectCommandContextBuilder) BuildProjectContext( projectCmdContext := newProjectCommandContext( ctx, cmdName, - cb.CommentBuilder.BuildApplyComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name, prjCfg.AutoMergeDisabled), + cb.CommentBuilder.BuildApplyComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name, prjCfg.AutoMergeDisabled, prjCfg.AutoMergeMethod), cb.CommentBuilder.BuildApprovePoliciesComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name), cb.CommentBuilder.BuildPlanComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name, commentFlags), prjCfg, @@ -145,6 +145,7 @@ func (cb *DefaultProjectCommandContextBuilder) BuildProjectContext( ctx.Scope, ctx.PullRequestStatus, ctx.PullStatus, + ctx.TeamAllowlistChecker, ) projectCmds = append(projectCmds, projectCmdContext) @@ -202,7 +203,7 @@ func (cb *PolicyCheckProjectCommandContextBuilder) BuildProjectContext( projectCmds = append(projectCmds, newProjectCommandContext( ctx, command.PolicyCheck, - cb.CommentBuilder.BuildApplyComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name, prjCfg.AutoMergeDisabled), + cb.CommentBuilder.BuildApplyComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name, prjCfg.AutoMergeDisabled, prjCfg.AutoMergeMethod), cb.CommentBuilder.BuildApprovePoliciesComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name), cb.CommentBuilder.BuildPlanComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name, commentFlags), prjCfg, @@ -217,6 +218,7 @@ func (cb *PolicyCheckProjectCommandContextBuilder) BuildProjectContext( ctx.Scope, ctx.PullRequestStatus, ctx.PullStatus, + ctx.TeamAllowlistChecker, )) } @@ -242,6 +244,7 @@ func newProjectCommandContext(ctx *command.Context, scope tally.Scope, pullReqStatus models.PullReqStatus, pullStatus *models.PullStatus, + teamAllowlistChecker command.TeamAllowlistChecker, ) command.ProjectContext { var projectPlanStatus models.ProjectPlanStatus @@ -273,7 +276,7 @@ func newProjectCommandContext(ctx *command.Context, EscapedCommentArgs: escapedCommentArgs, AutomergeEnabled: automergeEnabled, DeleteSourceBranchOnMerge: projCfg.DeleteSourceBranchOnMerge, - RepoLocking: projCfg.RepoLocking, + RepoLocksMode: projCfg.RepoLocks.Mode, CustomPolicyCheck: 
projCfg.CustomPolicyCheck, ParallelApplyEnabled: parallelApplyEnabled, ParallelPlanEnabled: parallelPlanEnabled, @@ -306,6 +309,8 @@ func newProjectCommandContext(ctx *command.Context, JobID: uuid.New().String(), ExecutionOrderGroup: projCfg.ExecutionOrderGroup, AbortOnExcecutionOrderFail: abortOnExcecutionOrderFail, + SilencePRComments: projCfg.SilencePRComments, + TeamAllowlistChecker: teamAllowlistChecker, } } diff --git a/server/events/project_command_context_builder_test.go b/server/events/project_command_context_builder_test.go index 8bee1d9fb0..84ce0ff630 100644 --- a/server/events/project_command_context_builder_test.go +++ b/server/events/project_command_context_builder_test.go @@ -48,11 +48,10 @@ func TestProjectCommandContextBuilder_PullStatus(t *testing.T) { expectedPlanCmt := "Plan Comment" terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(commandCtx.Log)) t.Run("with project name defined", func(t *testing.T) { When(mockCommentBuilder.BuildPlanComment(projRepoRelDir, projWorkspace, projName, []string{})).ThenReturn(expectedPlanCmt) - When(mockCommentBuilder.BuildApplyComment(projRepoRelDir, projWorkspace, projName, false)).ThenReturn(expectedApplyCmt) + When(mockCommentBuilder.BuildApplyComment(projRepoRelDir, projWorkspace, projName, false, "")).ThenReturn(expectedApplyCmt) pullStatus.Projects = []models.ProjectStatus{ { @@ -69,7 +68,7 @@ func TestProjectCommandContextBuilder_PullStatus(t *testing.T) { t.Run("with no project name defined", func(t *testing.T) { projCfg.Name = "" When(mockCommentBuilder.BuildPlanComment(projRepoRelDir, projWorkspace, "", []string{})).ThenReturn(expectedPlanCmt) - When(mockCommentBuilder.BuildApplyComment(projRepoRelDir, projWorkspace, "", false)).ThenReturn(expectedApplyCmt) + When(mockCommentBuilder.BuildApplyComment(projRepoRelDir, projWorkspace, "", false, "")).ThenReturn(expectedApplyCmt) pullStatus.Projects = []models.ProjectStatus{ { Status: models.ErroredPlanStatus, @@ -89,7 +88,7 @@ func TestProjectCommandContextBuilder_PullStatus(t *testing.T) { t.Run("when ParallelApply is set to true", func(t *testing.T) { projCfg.Name = "Apply Comment" When(mockCommentBuilder.BuildPlanComment(projRepoRelDir, projWorkspace, "", []string{})).ThenReturn(expectedPlanCmt) - When(mockCommentBuilder.BuildApplyComment(projRepoRelDir, projWorkspace, "", false)).ThenReturn(expectedApplyCmt) + When(mockCommentBuilder.BuildApplyComment(projRepoRelDir, projWorkspace, "", false, "")).ThenReturn(expectedApplyCmt) pullStatus.Projects = []models.ProjectStatus{ { Status: models.ErroredPlanStatus, @@ -110,7 +109,7 @@ func TestProjectCommandContextBuilder_PullStatus(t *testing.T) { t.Run("when AbortOnExcecutionOrderFail is set to true", func(t *testing.T) { projCfg.Name = "Apply Comment" When(mockCommentBuilder.BuildPlanComment(projRepoRelDir, projWorkspace, "", []string{})).ThenReturn(expectedPlanCmt) - When(mockCommentBuilder.BuildApplyComment(projRepoRelDir, projWorkspace, "", false)).ThenReturn(expectedApplyCmt) + When(mockCommentBuilder.BuildApplyComment(projRepoRelDir, projWorkspace, "", false, "")).ThenReturn(expectedApplyCmt) pullStatus.Projects = []models.ProjectStatus{ { Status: models.ErroredPlanStatus, diff --git a/server/events/project_command_runner.go b/server/events/project_command_runner.go index cd0130ab6e..26d4dc2cc2 100644 --- a/server/events/project_command_runner.go +++ b/server/events/project_command_runner.go @@ -65,20 +65,42 @@ type StepRunner interface { // CustomStepRunner runs custom run steps. 
type CustomStepRunner interface { // Run cmd in path. - Run(ctx command.ProjectContext, cmd string, path string, envs map[string]string, streamOutput bool, postProcessOutput valid.PostProcessRunOutputOption) (string, error) + Run( + ctx command.ProjectContext, + shell *valid.CommandShell, + cmd string, + path string, + envs map[string]string, + streamOutput bool, + postProcessOutput valid.PostProcessRunOutputOption, + ) (string, error) } //go:generate pegomock generate --package mocks -o mocks/mock_env_step_runner.go EnvStepRunner // EnvStepRunner runs env steps. type EnvStepRunner interface { - Run(ctx command.ProjectContext, cmd string, value string, path string, envs map[string]string) (string, error) + Run( + ctx command.ProjectContext, + shell *valid.CommandShell, + cmd string, + value string, + path string, + envs map[string]string, + ) (string, error) } // MultiEnvStepRunner runs multienv steps. type MultiEnvStepRunner interface { // Run cmd in path. - Run(ctx command.ProjectContext, cmd string, path string, envs map[string]string) (string, error) + Run( + ctx command.ProjectContext, + shell *valid.CommandShell, + cmd string, + path string, + envs map[string]string, + postProcessOutput valid.PostProcessRunOutputOption, + ) (string, error) } //go:generate pegomock generate --package mocks -o mocks/mock_webhooks_sender.go WebhooksSender @@ -225,13 +247,14 @@ type DefaultProjectCommandRunner struct { func (p *DefaultProjectCommandRunner) Plan(ctx command.ProjectContext) command.ProjectResult { planSuccess, failure, err := p.doPlan(ctx) return command.ProjectResult{ - Command: command.Plan, - PlanSuccess: planSuccess, - Error: err, - Failure: failure, - RepoRelDir: ctx.RepoRelDir, - Workspace: ctx.Workspace, - ProjectName: ctx.ProjectName, + Command: command.Plan, + PlanSuccess: planSuccess, + Error: err, + Failure: failure, + RepoRelDir: ctx.RepoRelDir, + Workspace: ctx.Workspace, + ProjectName: ctx.ProjectName, + SilencePRComments: ctx.SilencePRComments, } } @@ -253,13 +276,14 @@ func (p *DefaultProjectCommandRunner) PolicyCheck(ctx command.ProjectContext) co func (p *DefaultProjectCommandRunner) Apply(ctx command.ProjectContext) command.ProjectResult { applyOut, failure, err := p.doApply(ctx) return command.ProjectResult{ - Command: command.Apply, - Failure: failure, - Error: err, - ApplySuccess: applyOut, - RepoRelDir: ctx.RepoRelDir, - Workspace: ctx.Workspace, - ProjectName: ctx.ProjectName, + Command: command.Apply, + Failure: failure, + Error: err, + ApplySuccess: applyOut, + RepoRelDir: ctx.RepoRelDir, + Workspace: ctx.Workspace, + ProjectName: ctx.ProjectName, + SilencePRComments: ctx.SilencePRComments, } } @@ -320,7 +344,7 @@ func (p *DefaultProjectCommandRunner) StateRm(ctx command.ProjectContext) comman func (p *DefaultProjectCommandRunner) doApprovePolicies(ctx command.ProjectContext) (*models.PolicyCheckResults, string, error) { // Acquire Atlantis lock for this repo/dir/workspace. 
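(Editor's note on the signature changes above: the run, env, and multienv step runners now receive the shell to execute under as an explicit `*valid.CommandShell` argument instead of assuming a fixed default. A minimal sketch of how such an override might be resolved, assuming a `CommandShell` with `Shell` and `ShellArgs` fields; the names here are illustrative stand-ins, not the real `valid` package types.)

```go
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// CommandShell is a stand-in for the optional per-step shell override the
// diff threads through the step runners. Field names are assumptions.
type CommandShell struct {
	Shell     string
	ShellArgs []string
}

// buildCmd falls back to "sh -c" when no override is configured, which is
// the conventional default for run-style steps.
func buildCmd(shell *CommandShell, command, dir string) *exec.Cmd {
	name, args := "sh", []string{"-c"}
	if shell != nil {
		name, args = shell.Shell, shell.ShellArgs
	}
	cmd := exec.Command(name, append(args, command)...)
	cmd.Dir = dir
	return cmd
}

func main() {
	override := &CommandShell{Shell: "bash", ShellArgs: []string{"-euo", "pipefail", "-c"}}
	fmt.Println(strings.Join(buildCmd(nil, "terraform fmt -check", ".").Args, " "))
	fmt.Println(strings.Join(buildCmd(override, "terraform fmt -check", ".").Args, " "))
}
```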
- lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocking) + lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocksMode == valid.RepoLocksOnPlanMode) if err != nil { return nil, "", errors.Wrap(err, "acquiring lock") } @@ -375,7 +399,7 @@ func (p *DefaultProjectCommandRunner) doApprovePolicies(ctx command.ProjectConte ignorePolicy = true } // Increment approval if user is owner. - if isOwner && !ignorePolicy { + if isOwner && !ignorePolicy && (ctx.User.Username != ctx.Pull.Author || !policySet.PreventSelfApprove) { if !ctx.ClearPolicyApproval { prjPolicyStatus[i].Approvals = policyStatus.Approvals + 1 } else { @@ -389,6 +413,7 @@ func (p *DefaultProjectCommandRunner) doApprovePolicies(ctx command.ProjectConte if !policyStatus.Passed && (prjPolicyStatus[i].Approvals != policySet.ApproveCount) { allPassed = false } + prjPolicySetResults = append(prjPolicySetResults, models.PolicySetResult{ PolicySetName: policySet.Name, Passed: policyStatus.Passed, @@ -417,7 +442,7 @@ func (p *DefaultProjectCommandRunner) doPolicyCheck(ctx command.ProjectContext) // we will attempt to capture the lock here but fail to get the working directory // at which point we will unlock again to preserve functionality // If we fail to capture the lock here (super unlikely) then we error out and the user is forced to replan - lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocking) + lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocksMode == valid.RepoLocksOnPlanMode) if err != nil { return nil, "", errors.Wrap(err, "acquiring lock") @@ -536,7 +561,7 @@ func (p *DefaultProjectCommandRunner) doPolicyCheck(ctx command.ProjectContext) func (p *DefaultProjectCommandRunner) doPlan(ctx command.ProjectContext) (*models.PlanSuccess, string, error) { // Acquire Atlantis lock for this repo/dir/workspace. - lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocking) + lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocksMode == valid.RepoLocksOnPlanMode) if err != nil { return nil, "", errors.Wrap(err, "acquiring lock") } @@ -554,7 +579,7 @@ func (p *DefaultProjectCommandRunner) doPlan(ctx command.ProjectContext) (*model p.WorkingDir.SetCheckForUpstreamChanges() // Clone is idempotent so okay to run even if the repo was already cloned. - repoDir, mergedAgain, cloneErr := p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, ctx.Workspace) + repoDir, mergedAgain, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) if cloneErr != nil { if unlockErr := lockAttempt.UnlockFn(); unlockErr != nil { ctx.Log.Err("error unlocking state after plan error: %v", unlockErr) @@ -612,6 +637,16 @@ func (p *DefaultProjectCommandRunner) doApply(ctx command.ProjectContext) (apply return "", failure, err } + // Acquire Atlantis lock for this repo/dir/workspace. 
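(Editor's note: the reworked condition in doApprovePolicies above only counts an approval when the approver is an owner who is not the PR author, unless the policy set leaves PreventSelfApprove unset; the test case added further down in this diff exercises exactly that. A self-contained sketch of the predicate follows; the real code also honors an ignore-policy flag, omitted here for brevity.)

```go
package main

import "fmt"

type policySet struct {
	Name               string
	PreventSelfApprove bool
}

// approvalCounts reports whether an owner's approval should increment the
// count: owners may approve unless they authored the PR and the policy set
// opts in to PreventSelfApprove.
func approvalCounts(isOwner bool, username, author string, set policySet) bool {
	return isOwner && (username != author || !set.PreventSelfApprove)
}

func main() {
	strict := policySet{Name: "policy2", PreventSelfApprove: true}
	lax := policySet{Name: "policy1"}
	fmt.Println(approvalCounts(true, "lkysow", "lkysow", strict))  // false: self-approval blocked
	fmt.Println(approvalCounts(true, "lkysow", "lkysow", lax))     // true: policy1 does not opt in
	fmt.Println(approvalCounts(true, "someone", "lkysow", strict)) // true: another owner may approve
}
```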
+ lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocksMode == valid.RepoLocksOnApplyMode) + if err != nil { + return "", "", errors.Wrap(err, "acquiring lock") + } + if !lockAttempt.LockAcquired { + return "", lockAttempt.LockFailureReason, nil + } + ctx.Log.Debug("acquired lock for project") + // Acquire internal lock for the directory we're going to operate in. unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, ctx.Workspace, ctx.RepoRelDir) if err != nil { @@ -667,7 +702,7 @@ func (p *DefaultProjectCommandRunner) doVersion(ctx command.ProjectContext) (ver func (p *DefaultProjectCommandRunner) doImport(ctx command.ProjectContext) (out *models.ImportSuccess, failure string, err error) { // Clone is idempotent so okay to run even if the repo was already cloned. - repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, ctx.Workspace) + repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) if cloneErr != nil { return nil, "", cloneErr } @@ -682,7 +717,7 @@ func (p *DefaultProjectCommandRunner) doImport(ctx command.ProjectContext) (out } // Acquire Atlantis lock for this repo/dir/workspace. - lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocking) + lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocksMode != valid.RepoLocksDisabledMode) if err != nil { return nil, "", errors.Wrap(err, "acquiring lock") } @@ -713,7 +748,7 @@ func (p *DefaultProjectCommandRunner) doImport(ctx command.ProjectContext) (out func (p *DefaultProjectCommandRunner) doStateRm(ctx command.ProjectContext) (out *models.StateRmSuccess, failure string, err error) { // Clone is idempotent so okay to run even if the repo was already cloned. - repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, ctx.Workspace) + repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) if cloneErr != nil { return nil, "", cloneErr } @@ -723,7 +758,7 @@ func (p *DefaultProjectCommandRunner) doStateRm(ctx command.ProjectContext) (out } // Acquire Atlantis lock for this repo/dir/workspace. 
- lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocking) + lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocksMode != valid.RepoLocksDisabledMode) if err != nil { return nil, "", errors.Wrap(err, "acquiring lock") } @@ -777,15 +812,15 @@ func (p *DefaultProjectCommandRunner) runSteps(steps []valid.Step, ctx command.P case "state_rm": out, err = p.StateRmStepRunner.Run(ctx, step.ExtraArgs, absPath, envs) case "run": - out, err = p.RunStepRunner.Run(ctx, step.RunCommand, absPath, envs, true, step.Output) + out, err = p.RunStepRunner.Run(ctx, step.RunShell, step.RunCommand, absPath, envs, true, step.Output) case "env": - out, err = p.EnvStepRunner.Run(ctx, step.RunCommand, step.EnvVarValue, absPath, envs) + out, err = p.EnvStepRunner.Run(ctx, step.RunShell, step.RunCommand, step.EnvVarValue, absPath, envs) envs[step.EnvVarName] = out // We reset out to the empty string because we don't want it to // be printed to the PR, it's solely to set the environment variable. out = "" case "multienv": - out, err = p.MultiEnvStepRunner.Run(ctx, step.RunCommand, absPath, envs) + out, err = p.MultiEnvStepRunner.Run(ctx, step.RunShell, step.RunCommand, absPath, envs, step.Output) } if out != "" { diff --git a/server/events/project_command_runner_test.go b/server/events/project_command_runner_test.go index 786b67088e..68548efdd0 100644 --- a/server/events/project_command_runner_test.go +++ b/server/events/project_command_runner_test.go @@ -63,22 +63,10 @@ func TestDefaultProjectCommandRunner_Plan(t *testing.T) { } repoDir := t.TempDir() - When(mockWorkingDir.Clone( - Any[models.Repo](), - Any[models.PullRequest](), - Any[string](), - )).ThenReturn(repoDir, false, nil) - When(mockLocker.TryLock( - Any[logging.SimpleLogging](), - Any[models.PullRequest](), - Any[models.User](), - Any[string](), - Any[models.Project](), - AnyBool(), - )).ThenReturn(&events.TryLockResponse{ - LockAcquired: true, - LockKey: "lock-key", - }, nil) + When(mockWorkingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(repoDir, false, nil) + When(mockLocker.TryLock(Any[logging.SimpleLogging](), Any[models.PullRequest](), Any[models.User](), Any[string](), + Any[models.Project](), AnyBool())).ThenReturn(&events.TryLockResponse{LockAcquired: true, LockKey: "lock-key"}, nil) expEnvs := map[string]string{ "name": "value", @@ -111,7 +99,7 @@ func TestDefaultProjectCommandRunner_Plan(t *testing.T) { When(mockInit.Run(ctx, nil, repoDir, expEnvs)).ThenReturn("init", nil) When(mockPlan.Run(ctx, nil, repoDir, expEnvs)).ThenReturn("plan", nil) When(mockApply.Run(ctx, nil, repoDir, expEnvs)).ThenReturn("apply", nil) - When(mockRun.Run(ctx, "", repoDir, expEnvs, true, "")).ThenReturn("run", nil) + When(mockRun.Run(ctx, nil, "", repoDir, expEnvs, true, "")).ThenReturn("run", nil) res := runner.Plan(ctx) Assert(t, res.PlanSuccess != nil, "exp plan success") @@ -127,7 +115,7 @@ func TestDefaultProjectCommandRunner_Plan(t *testing.T) { case "apply": mockApply.VerifyWasCalledOnce().Run(ctx, nil, repoDir, expEnvs) case "run": - mockRun.VerifyWasCalledOnce().Run(ctx, "", repoDir, expEnvs, true, "") + mockRun.VerifyWasCalledOnce().Run(ctx, nil, "", repoDir, expEnvs, true, "") } } } @@ -317,7 +305,7 @@ func 
TestDefaultProjectCommandRunner_ApplyDiverged(t *testing.T) { } tmp := t.TempDir() When(mockWorkingDir.GetWorkingDir(ctx.BaseRepo, ctx.Pull, ctx.Workspace)).ThenReturn(tmp, nil) - When(mockWorkingDir.HasDiverged(tmp)).ThenReturn(true) + When(mockWorkingDir.HasDiverged(ctx.Log, tmp)).ThenReturn(true) res := runner.Apply(ctx) Equals(t, "Default branch must be rebased onto pull request before running apply.", res.Failure) @@ -436,6 +424,17 @@ func TestDefaultProjectCommandRunner_Apply(t *testing.T) { Any[models.PullRequest](), Any[string](), )).ThenReturn(repoDir, nil) + When(mockLocker.TryLock( + Any[logging.SimpleLogging](), + Any[models.PullRequest](), + Any[models.User](), + Any[string](), + Any[models.Project](), + AnyBool(), + )).ThenReturn(&events.TryLockResponse{ + LockAcquired: true, + LockKey: "lock-key", + }, nil) ctx := command.ProjectContext{ Log: logging.NewNoopLogger(t), @@ -456,8 +455,8 @@ func TestDefaultProjectCommandRunner_Apply(t *testing.T) { When(mockInit.Run(ctx, nil, repoDir, expEnvs)).ThenReturn("init", nil) When(mockPlan.Run(ctx, nil, repoDir, expEnvs)).ThenReturn("plan", nil) When(mockApply.Run(ctx, nil, repoDir, expEnvs)).ThenReturn("apply", nil) - When(mockRun.Run(ctx, "", repoDir, expEnvs, true, "")).ThenReturn("run", nil) - When(mockEnv.Run(ctx, "", "value", repoDir, make(map[string]string))).ThenReturn("value", nil) + When(mockRun.Run(ctx, nil, "", repoDir, expEnvs, true, "")).ThenReturn("run", nil) + When(mockEnv.Run(ctx, nil, "", "value", repoDir, make(map[string]string))).ThenReturn("value", nil) res := runner.Apply(ctx) Equals(t, c.expOut, res.ApplySuccess) @@ -472,9 +471,9 @@ func TestDefaultProjectCommandRunner_Apply(t *testing.T) { case "apply": mockApply.VerifyWasCalledOnce().Run(ctx, nil, repoDir, expEnvs) case "run": - mockRun.VerifyWasCalledOnce().Run(ctx, "", repoDir, expEnvs, true, "") + mockRun.VerifyWasCalledOnce().Run(ctx, nil, "", repoDir, expEnvs, true, "") case "env": - mockEnv.VerifyWasCalledOnce().Run(ctx, "", "value", repoDir, expEnvs) + mockEnv.VerifyWasCalledOnce().Run(ctx, nil, "", "value", repoDir, expEnvs) } } }) @@ -507,6 +506,17 @@ func TestDefaultProjectCommandRunner_ApplyRunStepFailure(t *testing.T) { Any[models.PullRequest](), Any[string](), )).ThenReturn(repoDir, nil) + When(mockLocker.TryLock( + Any[logging.SimpleLogging](), + Any[models.PullRequest](), + Any[models.User](), + Any[string](), + Any[models.Project](), + AnyBool(), + )).ThenReturn(&events.TryLockResponse{ + LockAcquired: true, + LockKey: "lock-key", + }, nil) ctx := command.ProjectContext{ Log: logging.NewNoopLogger(t), @@ -560,22 +570,10 @@ func TestDefaultProjectCommandRunner_RunEnvSteps(t *testing.T) { } repoDir := t.TempDir() - When(mockWorkingDir.Clone( - Any[models.Repo](), - Any[models.PullRequest](), - Any[string](), - )).ThenReturn(repoDir, false, nil) - When(mockLocker.TryLock( - Any[logging.SimpleLogging](), - Any[models.PullRequest](), - Any[models.User](), - Any[string](), - Any[models.Project](), - AnyBool(), - )).ThenReturn(&events.TryLockResponse{ - LockAcquired: true, - LockKey: "lock-key", - }, nil) + When(mockWorkingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(repoDir, false, nil) + When(mockLocker.TryLock(Any[logging.SimpleLogging](), Any[models.PullRequest](), Any[models.User](), Any[string](), + Any[models.Project](), AnyBool())).ThenReturn(&events.TryLockResponse{LockAcquired: true, LockKey: "lock-key"}, nil) ctx := command.ProjectContext{ Log: logging.NewNoopLogger(t), 
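(Editor's note: these test changes follow from the behavioral change above. The boolean ctx.RepoLocking has become a tri-state RepoLocksMode: plan-time call sites lock only in on-plan mode, doApply now acquires the lock itself in on-apply mode, and import/state rm lock unless locking is disabled entirely, which is why the Apply tests must now stub Locker.TryLock. A sketch of that mapping, with hypothetical constant names standing in for the valid package values.)

```go
package main

import "fmt"

// RepoLocksMode mirrors the tri-state introduced by this diff; the constant
// names are assumptions standing in for valid.RepoLocksDisabledMode etc.
type RepoLocksMode int

const (
	RepoLocksDisabled RepoLocksMode = iota
	RepoLocksOnPlan
	RepoLocksOnApply
)

// shouldLock reproduces the per-command boolean each TryLock call site now derives.
func shouldLock(mode RepoLocksMode, cmd string) bool {
	switch cmd {
	case "plan", "policy_check", "approve_policies":
		return mode == RepoLocksOnPlan
	case "apply":
		return mode == RepoLocksOnApply
	default: // import, state rm: lock unless locking is disabled
		return mode != RepoLocksDisabled
	}
}

func main() {
	for _, cmd := range []string{"plan", "apply", "import"} {
		fmt.Printf("%-7s lock on plan-mode=%v, apply-mode=%v\n",
			cmd, shouldLock(RepoLocksOnPlan, cmd), shouldLock(RepoLocksOnApply, cmd))
	}
}
```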
@@ -714,11 +712,8 @@ func TestDefaultProjectCommandRunner_Import(t *testing.T) { RePlanCmd: "atlantis plan -d . -- addr id", } repoDir := t.TempDir() - When(mockWorkingDir.Clone( - Any[models.Repo](), - Any[models.PullRequest](), - Any[string](), - )).ThenReturn(repoDir, false, nil) + When(mockWorkingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(repoDir, false, nil) if c.setup != nil { c.setup(repoDir, ctx, mockLocker, mockInit, mockImport) } @@ -1173,6 +1168,56 @@ func TestDefaultProjectCommandRunner_ApprovePolicies(t *testing.T) { expFailure: `One or more policy sets require additional approval.`, hasErr: false, }, + { + description: "Policy Approval should not be the Author of the PR", + userTeams: []string{"someuserteam"}, + clearPolicyApproval: false, + policySetCfg: valid.PolicySets{ + PolicySets: []valid.PolicySet{ + { + Owners: valid.PolicyOwners{ + Users: []string{"lkysow"}, + }, + Name: "policy1", + ApproveCount: 1, + }, + { + Owners: valid.PolicyOwners{ + Users: []string{"lkysow"}, + }, + Name: "policy2", + ApproveCount: 1, + PreventSelfApprove: true, + }, + }, + }, + policySetStatus: []models.PolicySetStatus{ + { + PolicySetName: "policy1", + Approvals: 0, + Passed: false, + }, + { + PolicySetName: "policy2", + Approvals: 0, + Passed: false, + }, + }, + expOut: []models.PolicySetResult{ + { + PolicySetName: "policy1", + ReqApprovals: 1, + CurApprovals: 1, + }, + { + PolicySetName: "policy2", + ReqApprovals: 1, + CurApprovals: 0, + }, + }, + expFailure: `One or more policy sets require additional approval.`, + hasErr: true, + }, } for _, c := range cases { @@ -1230,7 +1275,7 @@ func TestDefaultProjectCommandRunner_ApprovePolicies(t *testing.T) { projPolicyStatus = c.policySetStatus } - modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} + modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num, Author: testdata.User.Username} When(runner.VcsClient.GetTeamNamesForUser(testdata.GithubRepo, testdata.User)).ThenReturn(c.userTeams, nil) ctx := command.ProjectContext{ User: testdata.User, diff --git a/server/events/project_locker_test.go b/server/events/project_locker_test.go index 62be1c40f9..268faf20ee 100644 --- a/server/events/project_locker_test.go +++ b/server/events/project_locker_test.go @@ -29,7 +29,7 @@ import ( func TestDefaultProjectLocker_TryLockWhenLocked(t *testing.T) { var githubClient *vcs.GithubClient - mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil) + mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil, nil) mockLocker := mocks.NewMockLocker() locker := events.DefaultProjectLocker{ Locker: mockLocker, @@ -65,7 +65,7 @@ func TestDefaultProjectLocker_TryLockWhenLocked(t *testing.T) { func TestDefaultProjectLocker_TryLockWhenLockedSamePull(t *testing.T) { RegisterMockTestingT(t) var githubClient *vcs.GithubClient - mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil) + mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil, nil) mockLocker := mocks.NewMockLocker() locker := events.DefaultProjectLocker{ Locker: mockLocker, @@ -104,7 +104,7 @@ func TestDefaultProjectLocker_TryLockWhenLockedSamePull(t *testing.T) { func TestDefaultProjectLocker_TryLockUnlocked(t *testing.T) { RegisterMockTestingT(t) var githubClient *vcs.GithubClient - mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil) + mockClient := 
vcs.NewClientProxy(githubClient, nil, nil, nil, nil, nil) mockLocker := mocks.NewMockLocker() locker := events.DefaultProjectLocker{ Locker: mockLocker, @@ -142,7 +142,7 @@ func TestDefaultProjectLocker_TryLockUnlocked(t *testing.T) { func TestDefaultProjectLocker_RepoLocking(t *testing.T) { var githubClient *vcs.GithubClient - mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil) + mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil, nil) expProject := models.Project{} expWorkspace := "default" expPull := models.PullRequest{Num: 2} diff --git a/server/events/pull_closed_executor.go b/server/events/pull_closed_executor.go index de3e4e8009..5c005dbc9a 100644 --- a/server/events/pull_closed_executor.go +++ b/server/events/pull_closed_executor.go @@ -30,19 +30,19 @@ import ( "github.com/runatlantis/atlantis/server/jobs" ) -//go:generate pegomock generate --package mocks -o mocks/mock_resource_cleaner.go ResourceCleaner +//go:generate pegomock generate github.com/runatlantis/atlantis/server/events --package mocks -o mocks/mock_resource_cleaner.go ResourceCleaner type ResourceCleaner interface { CleanUp(pullInfo jobs.PullInfo) } -//go:generate pegomock generate --package mocks -o mocks/mock_pull_cleaner.go PullCleaner +//go:generate pegomock generate github.com/runatlantis/atlantis/server/events --package mocks -o mocks/mock_pull_cleaner.go PullCleaner // PullCleaner cleans up pull requests after they're closed/merged. type PullCleaner interface { // CleanUpPull deletes the workspaces used by the pull request on disk // and deletes any locks associated with this pull request for all workspaces. - CleanUpPull(repo models.Repo, pull models.PullRequest) error + CleanUpPull(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) error } // PullClosedExecutor executes the tasks required to clean up a closed pull @@ -51,7 +51,6 @@ type PullClosedExecutor struct { Locker locking.Locker VCSClient vcs.Client WorkingDir WorkingDir - Logger logging.SimpleLogging Backend locking.Backend PullClosedTemplate PullCleanupTemplate LogStreamResourceCleaner ResourceCleaner @@ -78,11 +77,11 @@ func (t *PullClosedEventTemplate) Execute(wr io.Writer, data interface{}) error } // CleanUpPull cleans up after a closed pull request. -func (p *PullClosedExecutor) CleanUpPull(repo models.Repo, pull models.PullRequest) error { +func (p *PullClosedExecutor) CleanUpPull(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) error { pullStatus, err := p.Backend.GetPullStatus(pull) if err != nil { // Log and continue to clean up other resources. - p.Logger.Err("retrieving pull status: %s", err) + logger.Err("retrieving pull status: %s", err) } if pullStatus != nil { @@ -97,7 +96,7 @@ func (p *PullClosedExecutor) CleanUpPull(repo models.Repo, pull models.PullReque } } - if err := p.WorkingDir.Delete(repo, pull); err != nil { + if err := p.WorkingDir.Delete(logger, repo, pull); err != nil { return errors.Wrap(err, "cleaning workspace") } @@ -111,7 +110,7 @@ func (p *PullClosedExecutor) CleanUpPull(repo models.Repo, pull models.PullReque // Delete pull from DB. if err := p.Backend.DeletePullStatus(pull); err != nil { - p.Logger.Err("deleting pull from db: %s", err) + logger.Err("deleting pull from db: %s", err) } // If there are no locks then there's no need to comment. 
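(Editor's note: a pattern worth calling out, since it repeats across this diff for WorkingDir, PullCleaner, and the VCS clients: the request-scoped logger moves out of long-lived struct fields and into method parameters, so each call logs with the context of the event that triggered it. A trimmed sketch of the before/after shape, with stand-in types rather than the real ones.)

```go
package main

import "log"

type Repo struct{ FullName string }
type PullRequest struct{ Num int }

// Before: the executor held one logger for its whole lifetime.
type pullClosedExecutorOld struct{ logger *log.Logger }

func (p *pullClosedExecutorOld) CleanUpPull(repo Repo, pull PullRequest) {
	p.logger.Printf("cleaning up %s #%d", repo.FullName, pull.Num)
}

// After: the caller passes a logger scoped to the current event, so log
// lines can carry per-pull fields without sharing mutable state.
type pullClosedExecutorNew struct{}

func (p *pullClosedExecutorNew) CleanUpPull(logger *log.Logger, repo Repo, pull PullRequest) {
	logger.Printf("cleaning up %s #%d", repo.FullName, pull.Num)
}

func main() {
	perRequest := log.New(log.Writer(), "[owner/repo#1] ", log.LstdFlags)
	(&pullClosedExecutorNew{}).CleanUpPull(perRequest, Repo{FullName: "owner/repo"}, PullRequest{Num: 1})
}
```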
@@ -124,7 +123,7 @@ func (p *PullClosedExecutor) CleanUpPull(repo models.Repo, pull models.PullReque if err = pullClosedTemplate.Execute(&buf, templateData); err != nil { return errors.Wrap(err, "rendering template for comment") } - return p.VCSClient.CreateComment(repo, pull.Num, buf.String(), "") + return p.VCSClient.CreateComment(logger, repo, pull.Num, buf.String(), "") } // buildTemplateData formats the lock data into a slice that can easily be diff --git a/server/events/pull_closed_executor_test.go b/server/events/pull_closed_executor_test.go index 2992f4f820..df904a1c6f 100644 --- a/server/events/pull_closed_executor_test.go +++ b/server/events/pull_closed_executor_test.go @@ -39,6 +39,7 @@ import ( func TestCleanUpPullWorkspaceErr(t *testing.T) { t.Log("when workspace.Delete returns an error, we return it") RegisterMockTestingT(t) + logger := logging.NewNoopLogger(t) w := mocks.NewMockWorkingDir() tmp := t.TempDir() db, err := db.New(tmp) @@ -49,14 +50,15 @@ func TestCleanUpPullWorkspaceErr(t *testing.T) { Backend: db, } err = errors.New("err") - When(w.Delete(testdata.GithubRepo, testdata.Pull)).ThenReturn(err) - actualErr := pce.CleanUpPull(testdata.GithubRepo, testdata.Pull) + When(w.Delete(logger, testdata.GithubRepo, testdata.Pull)).ThenReturn(err) + actualErr := pce.CleanUpPull(logger, testdata.GithubRepo, testdata.Pull) Equals(t, "cleaning workspace: err", actualErr.Error()) } func TestCleanUpPullUnlockErr(t *testing.T) { t.Log("when locker.UnlockByPull returns an error, we return it") RegisterMockTestingT(t) + logger := logging.NewNoopLogger(t) w := mocks.NewMockWorkingDir() l := lockmocks.NewMockLocker() tmp := t.TempDir() @@ -70,11 +72,12 @@ func TestCleanUpPullUnlockErr(t *testing.T) { } err = errors.New("err") When(l.UnlockByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)).ThenReturn(nil, err) - actualErr := pce.CleanUpPull(testdata.GithubRepo, testdata.Pull) + actualErr := pce.CleanUpPull(logger, testdata.GithubRepo, testdata.Pull) Equals(t, "cleaning up locks: err", actualErr.Error()) } func TestCleanUpPullNoLocks(t *testing.T) { + logger := logging.NewNoopLogger(t) t.Log("when there are no locks to clean up, we don't comment") RegisterMockTestingT(t) w := mocks.NewMockWorkingDir() @@ -90,12 +93,13 @@ func TestCleanUpPullNoLocks(t *testing.T) { Backend: db, } When(l.UnlockByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)).ThenReturn(nil, nil) - err = pce.CleanUpPull(testdata.GithubRepo, testdata.Pull) + err = pce.CleanUpPull(logger, testdata.GithubRepo, testdata.Pull) Ok(t, err) - cp.VerifyWasCalled(Never()).CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()) + cp.VerifyWasCalled(Never()).CreateComment(Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) } func TestCleanUpPullComments(t *testing.T) { + logger := logging.NewNoopLogger(t) t.Log("should comment correctly") RegisterMockTestingT(t) cases := []struct { @@ -187,9 +191,10 @@ func TestCleanUpPullComments(t *testing.T) { } t.Log("testing: " + c.Description) When(l.UnlockByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)).ThenReturn(c.Locks, nil) - err = pce.CleanUpPull(testdata.GithubRepo, testdata.Pull) + err = pce.CleanUpPull(logger, testdata.GithubRepo, testdata.Pull) Ok(t, err) - _, _, comment, _ := cp.VerifyWasCalledOnce().CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()).GetCapturedArguments() + _, _, _, comment, _ := cp.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), 
Any[models.Repo](), Any[int](), Any[string](), Any[string]()).GetCapturedArguments() expected := "Locks and plans deleted for the projects and workspaces modified in this pull request:\n\n" + c.Exp Equals(t, expected, comment) @@ -266,7 +271,6 @@ func TestCleanUpLogStreaming(t *testing.T) { VCSClient: client, PullClosedTemplate: &events.PullClosedEventTemplate{}, LogStreamResourceCleaner: prjCmdOutHandler, - Logger: logger, } locks := []models.ProjectLock{ @@ -278,11 +282,12 @@ func TestCleanUpLogStreaming(t *testing.T) { When(locker.UnlockByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)).ThenReturn(locks, nil) // Clean up. - err = pullClosedExecutor.CleanUpPull(testdata.GithubRepo, testdata.Pull) + err = pullClosedExecutor.CleanUpPull(logger, testdata.GithubRepo, testdata.Pull) Ok(t, err) close(prjCmdOutput) - _, _, comment, _ := client.VerifyWasCalledOnce().CreateComment(Any[models.Repo](), Any[int](), Any[string](), Any[string]()).GetCapturedArguments() + _, _, _, comment, _ := client.VerifyWasCalledOnce().CreateComment( + Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()).GetCapturedArguments() expectedComment := "Locks and plans deleted for the projects and workspaces modified in this pull request:\n\n" + "- dir: `.` workspace: `default`" Equals(t, expectedComment, comment) diff --git a/server/events/pull_updater.go b/server/events/pull_updater.go index d8fcfe34f9..d85bd84f9d 100644 --- a/server/events/pull_updater.go +++ b/server/events/pull_updater.go @@ -3,6 +3,7 @@ package events import ( "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/vcs" + "github.com/runatlantis/atlantis/server/utils" ) type PullUpdater struct { @@ -23,14 +24,31 @@ func (c *PullUpdater) updatePull(ctx *command.Context, cmd PullCommand, res comm // clutter in a pull/merge request. This will not delete the comment, since the // comment trail may be useful in auditing or backtracing problems. 
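(Editor's note: the updatePull body that follows adds a second filter besides HidePrevPlanComments: project results whose SilencePRComments list names the current command are dropped, and if nothing survives, no comment is posted at all. The same logic as a stand-alone sketch, with the standard library's slices.Contains playing the role of the utils.SlicesContains helper.)

```go
package main

import (
	"fmt"
	"slices"
)

type projectResult struct {
	ProjectName       string
	SilencePRComments []string
}

// filterSilenced keeps only the results that should be commented on for cmd.
// An empty result signals the caller to skip the VCS comment entirely.
func filterSilenced(results []projectResult, cmd string) []projectResult {
	var keep []projectResult
	for _, r := range results {
		if slices.Contains(r.SilencePRComments, cmd) {
			continue // e.g. silence noisy plan comments for this project
		}
		keep = append(keep, r)
	}
	return keep
}

func main() {
	results := []projectResult{
		{ProjectName: "noisy", SilencePRComments: []string{"plan"}},
		{ProjectName: "chatty"},
	}
	fmt.Println(filterSilenced(results, "plan"))  // only "chatty" remains
	fmt.Println(filterSilenced(results, "apply")) // both remain
}
```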
if c.HidePrevPlanComments { - ctx.Log.Debug("Hiding previous plan comments for command: '%v', directory: '%v'", cmd.CommandName().TitleString(), cmd.Dir()) - if err := c.VCSClient.HidePrevCommandComments(ctx.Pull.BaseRepo, ctx.Pull.Num, cmd.CommandName().TitleString(), cmd.Dir()); err != nil { + ctx.Log.Debug("hiding previous plan comments for command: '%v', directory: '%v'", cmd.CommandName().TitleString(), cmd.Dir()) + if err := c.VCSClient.HidePrevCommandComments(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull.Num, cmd.CommandName().TitleString(), cmd.Dir()); err != nil { ctx.Log.Err("unable to hide old comments: %s", err) } } - comment := c.MarkdownRenderer.Render(res, cmd.CommandName(), cmd.SubCommandName(), ctx.Log.GetHistory(), cmd.IsVerbose(), ctx.Pull.BaseRepo.VCSHost.Type) - if err := c.VCSClient.CreateComment(ctx.Pull.BaseRepo, ctx.Pull.Num, comment, cmd.CommandName().String()); err != nil { + if len(res.ProjectResults) > 0 { + var commentOnProjects []command.ProjectResult + for _, result := range res.ProjectResults { + if utils.SlicesContains(result.SilencePRComments, cmd.CommandName().String()) { + ctx.Log.Debug("silenced command '%s' comment for project '%s'", cmd.CommandName().String(), result.ProjectName) + continue + } + commentOnProjects = append(commentOnProjects, result) + } + + if len(commentOnProjects) == 0 { + return + } + + res.ProjectResults = commentOnProjects + } + + comment := c.MarkdownRenderer.Render(ctx, res, cmd) + if err := c.VCSClient.CreateComment(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull.Num, comment, cmd.CommandName().String()); err != nil { ctx.Log.Err("unable to comment: %s", err) } } diff --git a/server/events/repo_branch_test.go b/server/events/repo_branch_test.go index 4a6bb9e922..cc4521a20e 100644 --- a/server/events/repo_branch_test.go +++ b/server/events/repo_branch_test.go @@ -80,7 +80,7 @@ projects: repo, err := parser.ParseRepoCfg(tmp, global, "github.com/foo/bar", "main") require.NoError(t, err) - require.Equal(t, 1, len(repo.Projects)) + require.Len(t, repo.Projects, 1) t.Logf("Projects: %+v", repo.Projects) } diff --git a/server/events/team_allowlist_checker_test.go b/server/events/team_allowlist_checker_test.go deleted file mode 100644 index b389b49ae0..0000000000 --- a/server/events/team_allowlist_checker_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package events_test - -import ( - "testing" - - "github.com/runatlantis/atlantis/server/events" - . 
"github.com/runatlantis/atlantis/testing" -) - -func TestNewTeamAllowListChecker(t *testing.T) { - allowlist := `bob:plan, dave:apply` - _, err := events.NewTeamAllowlistChecker(allowlist) - Ok(t, err) -} - -func TestNewTeamAllowListCheckerEmpty(t *testing.T) { - allowlist := `` - checker, err := events.NewTeamAllowlistChecker(allowlist) - Ok(t, err) - Equals(t, false, checker.HasRules()) -} - -func TestIsCommandAllowedForTeam(t *testing.T) { - allowlist := `bob:plan, dave:apply, connie:plan, connie:apply` - checker, err := events.NewTeamAllowlistChecker(allowlist) - Ok(t, err) - Equals(t, true, checker.IsCommandAllowedForTeam("connie", "plan")) - Equals(t, true, checker.IsCommandAllowedForTeam("connie", "apply")) - Equals(t, true, checker.IsCommandAllowedForTeam("dave", "apply")) - Equals(t, true, checker.IsCommandAllowedForTeam("bob", "plan")) - Equals(t, false, checker.IsCommandAllowedForTeam("bob", "apply")) -} - -func TestIsCommandAllowedForAnyTeam(t *testing.T) { - allowlist := `alpha:plan,beta:release,*:unlock,nobody:*` - teams := []string{`alpha`, `beta`} - checker, err := events.NewTeamAllowlistChecker(allowlist) - Ok(t, err) - Equals(t, true, checker.IsCommandAllowedForAnyTeam(teams, `plan`)) - Equals(t, true, checker.IsCommandAllowedForAnyTeam(teams, `release`)) - Equals(t, true, checker.IsCommandAllowedForAnyTeam(teams, `unlock`)) - Equals(t, false, checker.IsCommandAllowedForAnyTeam(teams, `noop`)) -} diff --git a/server/events/templates/import_success_unwrapped.tmpl b/server/events/templates/import_success_unwrapped.tmpl index c8a8a1b19d..08b6336d4d 100644 --- a/server/events/templates/import_success_unwrapped.tmpl +++ b/server/events/templates/import_success_unwrapped.tmpl @@ -6,5 +6,7 @@ :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `{{.RePlanCmd}}` + ```shell + {{.RePlanCmd}} + ``` {{ end -}} diff --git a/server/events/templates/import_success_wrapped.tmpl b/server/events/templates/import_success_wrapped.tmpl index 12711c1d4d..00d9689a38 100644 --- a/server/events/templates/import_success_wrapped.tmpl +++ b/server/events/templates/import_success_wrapped.tmpl @@ -8,5 +8,7 @@ :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `{{ .RePlanCmd }}` + ```shell + {{ .RePlanCmd }} + ``` {{ end -}} diff --git a/server/events/templates/log.tmpl b/server/events/templates/log.tmpl index cb409801c7..305436eebb 100644 --- a/server/events/templates/log.tmpl +++ b/server/events/templates/log.tmpl @@ -1,7 +1,7 @@ {{ define "log" -}} -{{ if .Verbose }} +{{ if .Verbose -}}
<details><summary>Log</summary>
-
+
```
{{.Log}}```
</details>
diff --git a/server/events/templates/merged_again.tmpl b/server/events/templates/merged_again.tmpl index 796afe552a..ece363f19e 100644 --- a/server/events/templates/merged_again.tmpl +++ b/server/events/templates/merged_again.tmpl @@ -1,5 +1,5 @@ {{ define "mergedAgain" -}} -{{ if .MergedAgain }} +{{ if .MergedAgain -}} :twisted_rightwards_arrows: Upstream was modified, a new merge was performed. {{ end -}} {{ end -}} diff --git a/server/events/templates/multi_project_apply.tmpl b/server/events/templates/multi_project_apply.tmpl index 50038555b3..2e2b2baa30 100644 --- a/server/events/templates/multi_project_apply.tmpl +++ b/server/events/templates/multi_project_apply.tmpl @@ -1,5 +1,5 @@ {{ define "multiProjectApply" -}} -{{ template "multiProjectHeader" . }} +{{ template "multiProjectHeader" . -}} {{ range $i, $result := .Results -}} ### {{ add $i 1 }}. {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}` {{ $result.Rendered }} diff --git a/server/events/templates/multi_project_header.tmpl b/server/events/templates/multi_project_header.tmpl index c202c7e50c..c1ce5dc053 100644 --- a/server/events/templates/multi_project_header.tmpl +++ b/server/events/templates/multi_project_header.tmpl @@ -3,5 +3,9 @@ Ran {{.Command}} for {{ len .Results }} projects: {{ range $result := .Results -}} 1. {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}` +{{ end -}} +{{ if (gt (len .Results) 0) -}} +--- + {{ end -}} {{ end -}} diff --git a/server/events/templates/multi_project_import.tmpl b/server/events/templates/multi_project_import.tmpl index 22e4b4388d..31cd70cbd4 100644 --- a/server/events/templates/multi_project_import.tmpl +++ b/server/events/templates/multi_project_import.tmpl @@ -1,5 +1,5 @@ {{ define "multiProjectImport" -}} -{{ template "multiProjectHeader" . }} +{{ template "multiProjectHeader" . -}} {{ range $i, $result := .Results -}} ### {{ add $i 1 }}. {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}` {{ $result.Rendered }} diff --git a/server/events/templates/multi_project_plan.tmpl b/server/events/templates/multi_project_plan.tmpl index 9c3898ad48..f57e96794a 100644 --- a/server/events/templates/multi_project_plan.tmpl +++ b/server/events/templates/multi_project_plan.tmpl @@ -1,5 +1,5 @@ {{ define "multiProjectPlan" -}} -{{ template "multiProjectHeader" . }} +{{ template "multiProjectHeader" . 
-}} {{ $disableApplyAll := .DisableApplyAll -}} {{ $hideUnchangedPlans := .HideUnchangedPlanComments -}} {{ range $i, $result := .Results -}} diff --git a/server/events/templates/multi_project_plan_footer.tmpl b/server/events/templates/multi_project_plan_footer.tmpl index 41683ab018..1c193a16b7 100644 --- a/server/events/templates/multi_project_plan_footer.tmpl +++ b/server/events/templates/multi_project_plan_footer.tmpl @@ -4,10 +4,14 @@ {{ len .Results }} projects, {{ .NumPlansWithChanges }} with changes, {{ .NumPlansWithNoChanges }} with no changes, {{ .NumPlanFailures }} failed {{ if and (not .PlansDeleted) (ne .DisableApplyAll true) }} -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `{{ .ExecutableName }} apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `{{ .ExecutableName }} unlock` +* :fast_forward: To **apply** all unapplied plans from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} unlock + ``` {{ end -}} {{ end -}} {{ end -}} diff --git a/server/events/templates/multi_project_policy.tmpl b/server/events/templates/multi_project_policy.tmpl index c34c59f896..add574fde4 100644 --- a/server/events/templates/multi_project_policy.tmpl +++ b/server/events/templates/multi_project_policy.tmpl @@ -1,5 +1,5 @@ {{ define "multiProjectPolicy" -}} -{{ template "multiProjectHeader" . }} +{{ template "multiProjectHeader" . -}} {{ $disableApplyAll := .DisableApplyAll -}} {{ $hideUnchangedPlans := .HideUnchangedPlanComments -}} {{ range $i, $result := .Results -}} @@ -13,10 +13,14 @@ {{ end -}} {{ if ne .DisableApplyAll true -}} {{ if and (gt (len .Results) 0) (not .PlansDeleted) -}} -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `{{ .ExecutableName }} apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `{{ .ExecutableName }} unlock` +* :fast_forward: To **apply** all unapplied plans from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} unlock + ``` {{ end -}} {{ end -}} {{ template "log" . -}} diff --git a/server/events/templates/multi_project_policy_unsuccessful.tmpl b/server/events/templates/multi_project_policy_unsuccessful.tmpl index a0a59fd994..039dd9ce7c 100644 --- a/server/events/templates/multi_project_policy_unsuccessful.tmpl +++ b/server/events/templates/multi_project_policy_unsuccessful.tmpl @@ -1,5 +1,5 @@ {{ define "multiProjectPolicyUnsuccessful" -}} -{{ template "multiProjectHeader" . }} +{{ template "multiProjectHeader" . -}} {{ $disableApplyAll := .DisableApplyAll -}} {{ range $i, $result := .Results -}} ### {{ add $i 1 }}. 
{{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}` @@ -11,12 +11,18 @@ {{ end -}} {{ if ne .DisableApplyAll true -}} {{ if and (gt (len .Results) 0) (not .PlansDeleted) -}} -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `{{ .ExecutableName }} approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `{{ .ExecutableName }} unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `{{ .ExecutableName }} plan` + ```shell + {{ .ExecutableName }} plan + ``` {{ end -}} {{ end -}} {{- template "log" . -}} diff --git a/server/events/templates/multi_project_state_rm.tmpl b/server/events/templates/multi_project_state_rm.tmpl index 90c0259dfe..a00464a7b8 100644 --- a/server/events/templates/multi_project_state_rm.tmpl +++ b/server/events/templates/multi_project_state_rm.tmpl @@ -1,5 +1,5 @@ {{ define "multiProjectStateRm" -}} -{{ template "multiProjectHeader" . }} +{{ template "multiProjectHeader" . -}} {{ range $i, $result := .Results -}} ### {{ add $i 1 }}. {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}` {{ $result.Rendered}} diff --git a/server/events/templates/multi_project_version.tmpl b/server/events/templates/multi_project_version.tmpl index 08266520e5..70eeea40f9 100644 --- a/server/events/templates/multi_project_version.tmpl +++ b/server/events/templates/multi_project_version.tmpl @@ -1,5 +1,5 @@ {{ define "multiProjectVersion" -}} -{{ template "multiProjectHeader" . }} +{{ template "multiProjectHeader" . -}} {{ range $i, $result := .Results -}} ### {{ add $i 1 }}. {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}` {{ $result.Rendered}} diff --git a/server/events/templates/plan_success_unwrapped.tmpl b/server/events/templates/plan_success_unwrapped.tmpl index 6bd81de233..e4ed2e0911 100644 --- a/server/events/templates/plan_success_unwrapped.tmpl +++ b/server/events/templates/plan_success_unwrapped.tmpl @@ -8,13 +8,17 @@ This plan was not saved because one or more projects failed and automerge requir {{ else -}} {{ if not .DisableApply -}} * :arrow_forward: To **apply** this plan, comment: - * `{{ .ApplyCmd }}` + ```shell + {{ .ApplyCmd }} + ``` {{ end -}} {{ if not .DisableRepoLocking -}} -* :put_litter_in_its_place: To **delete** this plan click [here]({{ .LockURL }}) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here]({{ .LockURL }}) {{ end -}} * :repeat: To **plan** this project again, comment: - * `{{ .RePlanCmd }}` + ```shell + {{ .RePlanCmd }} + ``` {{ end -}} -{{ template "mergedAgain" . }} +{{ template "mergedAgain" . 
-}} {{ end -}} diff --git a/server/events/templates/plan_success_wrapped.tmpl b/server/events/templates/plan_success_wrapped.tmpl index cef96d0609..55c0d3042a 100644 --- a/server/events/templates/plan_success_wrapped.tmpl +++ b/server/events/templates/plan_success_wrapped.tmpl @@ -4,21 +4,25 @@ ```diff {{ if .EnableDiffMarkdownFormat }}{{ .DiffMarkdownFormattedTerraformOutput }}{{ else }}{{ .TerraformOutput }}{{ end }} ``` +
</details>

{{ if .PlanWasDeleted -}} This plan was not saved because one or more projects failed and automerge requires all plans pass. {{ else -}} {{ if not .DisableApply -}} * :arrow_forward: To **apply** this plan, comment: - * `{{ .ApplyCmd }}` + ```shell + {{ .ApplyCmd }} + ``` {{ end -}} {{ if not .DisableRepoLocking -}} -* :put_litter_in_its_place: To **delete** this plan click [here]({{ .LockURL }}) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here]({{ .LockURL }}) {{ end -}} * :repeat: To **plan** this project again, comment: - * `{{ .RePlanCmd }}` + ```shell + {{ .RePlanCmd }} + ``` {{ end -}} - {{ .PlanSummary -}} {{ template "mergedAgain" . -}} {{ end -}} diff --git a/server/events/templates/policy_check_results_unwrapped.tmpl b/server/events/templates/policy_check_results_unwrapped.tmpl index 089e85660f..16d7b9e865 100644 --- a/server/events/templates/policy_check_results_unwrapped.tmpl +++ b/server/events/templates/policy_check_results_unwrapped.tmpl @@ -14,16 +14,22 @@ {{- end }} {{- if .PolicyCleared }} * :arrow_forward: To **apply** this plan, comment: - * `{{ .ApplyCmd }}` + ```shell + {{ .ApplyCmd }} + ``` {{- else }} #### Policy Approval Status: ``` {{ .PolicyApprovalSummary }} ``` * :heavy_check_mark: To **approve** this project, comment: - * `{{ .ApprovePoliciesCmd }}` + ```shell + {{ .ApprovePoliciesCmd }} + ``` {{- end }} -* :put_litter_in_its_place: To **delete** this plan click [here]({{ .LockURL }}) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here]({{ .LockURL }}) * :repeat: To re-run policies **plan** this project again by commenting: - * `{{ .RePlanCmd }}` + ```shell + {{ .RePlanCmd }} + ``` {{ end -}} diff --git a/server/events/templates/policy_check_results_wrapped.tmpl b/server/events/templates/policy_check_results_wrapped.tmpl index bf03a6b1f1..49e38c46c4 100644 --- a/server/events/templates/policy_check_results_wrapped.tmpl +++ b/server/events/templates/policy_check_results_wrapped.tmpl @@ -2,9 +2,11 @@
<details><summary>Show Output</summary> {{- if eq .Command "Policy Check" }} {{- if ne .PreConftestOutput "" }} + ```diff {{ .PreConftestOutput }} ``` + {{- end -}} {{ template "policyCheck" .PolicySetResults }} {{- if ne .PostConftestOutput "" }} @@ -15,23 +17,33 @@ {{- end }} {{- if .PolicyCleared }} * :arrow_forward: To **apply** this plan, comment: - * `{{ .ApplyCmd }}` + ```shell + {{ .ApplyCmd }} + ``` {{- else }} +
+ #### Policy Approval Status: ``` {{ .PolicyApprovalSummary }} ``` * :heavy_check_mark: To **approve** this project, comment: - * `{{ .ApprovePoliciesCmd }}` + ```shell + {{ .ApprovePoliciesCmd }} + ``` {{- end }} -* :put_litter_in_its_place: To **delete** this plan click [here]({{ .LockURL }}) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here]({{ .LockURL }}) * :repeat: To re-run policies **plan** this project again by commenting: - * `{{ .RePlanCmd }}` - + ```shell + {{ .RePlanCmd }} + ``` {{- if eq .Command "Policy Check" }} +{{- if ne .PolicyCheckSummary "" }} ``` {{ .PolicyCheckSummary }} ``` {{- end }} -{{ end -}} \ No newline at end of file + +{{- end }} +{{ end -}} diff --git a/server/events/templates/single_project_plan_success.tmpl b/server/events/templates/single_project_plan_success.tmpl index afbe3d5701..77f6e13d64 100644 --- a/server/events/templates/single_project_plan_success.tmpl +++ b/server/events/templates/single_project_plan_success.tmpl @@ -5,10 +5,14 @@ Ran {{ .Command }} for {{ if $result.ProjectName }}project: `{{ $result.ProjectN {{ $result.Rendered }} {{ if ne .DisableApplyAll true }} --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `{{ .ExecutableName }} apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `{{ .ExecutableName }} unlock` +* :fast_forward: To **apply** all unapplied plans from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} unlock + ``` {{ end -}} {{ template "log" . -}} {{ end -}} diff --git a/server/events/templates/single_project_policy_unsuccessful.tmpl b/server/events/templates/single_project_policy_unsuccessful.tmpl index 0760406814..0bf0ac1a0c 100644 --- a/server/events/templates/single_project_policy_unsuccessful.tmpl +++ b/server/events/templates/single_project_policy_unsuccessful.tmpl @@ -3,14 +3,20 @@ Ran {{ .Command }} for {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}` {{ $result.Rendered }} -{{ if ne .DisableApplyAll true }} +{{ if ne .DisableApplyAll true -}} --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `{{ .ExecutableName }} approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `{{ .ExecutableName }} unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `{{ .ExecutableName }} plan` + ```shell + {{ .ExecutableName }} plan + ``` {{ end -}} {{- template "log" . -}} {{ end -}} diff --git a/server/events/templates/state_rm_success_unwrapped.tmpl b/server/events/templates/state_rm_success_unwrapped.tmpl index c0f24323a5..564d8796ae 100644 --- a/server/events/templates/state_rm_success_unwrapped.tmpl +++ b/server/events/templates/state_rm_success_unwrapped.tmpl @@ -6,5 +6,7 @@ :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. 
* :repeat: To **plan** this project again, comment: - * `{{.RePlanCmd}}` + ```shell + {{.RePlanCmd}} + ``` {{ end }} diff --git a/server/events/templates/state_rm_success_wrapped.tmpl b/server/events/templates/state_rm_success_wrapped.tmpl index f182c85bc1..2a703107c6 100644 --- a/server/events/templates/state_rm_success_wrapped.tmpl +++ b/server/events/templates/state_rm_success_wrapped.tmpl @@ -8,5 +8,7 @@ :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `{{.RePlanCmd}}` + ```shell + {{.RePlanCmd}} + ``` {{ end }} diff --git a/server/events/unlock_command_runner.go b/server/events/unlock_command_runner.go index dd2b4c45ef..af360adf83 100644 --- a/server/events/unlock_command_runner.go +++ b/server/events/unlock_command_runner.go @@ -42,7 +42,7 @@ func (u *UnlockCommandRunner) Run(ctx *command.Context, _ *CommentCommand) { var err error if disableUnlockLabel != "" { var labels []string - labels, err = u.vcsClient.GetPullLabels(baseRepo, ctx.Pull) + labels, err = u.vcsClient.GetPullLabels(ctx.Log, baseRepo, ctx.Pull) if err != nil { vcsMessage = "Failed to retrieve PR labels... Not unlocking" ctx.Log.Err("Failed to retrieve PR labels for pull %s", err.Error()) @@ -56,7 +56,7 @@ func (u *UnlockCommandRunner) Run(ctx *command.Context, _ *CommentCommand) { var numLocks int if err == nil && !hasLabel { - numLocks, err = u.deleteLockCommand.DeleteLocksByPull(baseRepo.FullName, pullNum) + numLocks, err = u.deleteLockCommand.DeleteLocksByPull(ctx.Log, baseRepo.FullName, pullNum) if err != nil { vcsMessage = "Failed to delete PR locks" ctx.Log.Err("failed to delete locks by pull %s", err.Error()) @@ -71,7 +71,7 @@ func (u *UnlockCommandRunner) Run(ctx *command.Context, _ *CommentCommand) { } } - if commentErr := u.vcsClient.CreateComment(baseRepo, pullNum, vcsMessage, command.Unlock.String()); commentErr != nil { + if commentErr := u.vcsClient.CreateComment(ctx.Log, baseRepo, pullNum, vcsMessage, command.Unlock.String()); commentErr != nil { ctx.Log.Err("unable to comment: %s", commentErr) } } diff --git a/server/events/vcs/azuredevops_client.go b/server/events/vcs/azuredevops_client.go index c89d490005..63344d85e7 100644 --- a/server/events/vcs/azuredevops_client.go +++ b/server/events/vcs/azuredevops_client.go @@ -13,6 +13,7 @@ import ( "github.com/pkg/errors" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/vcs/common" + "github.com/runatlantis/atlantis/server/logging" ) // AzureDevopsClient represents an Azure DevOps VCS client @@ -55,7 +56,7 @@ func NewAzureDevopsClient(hostname string, userName string, token string) (*Azur // GetModifiedFiles returns the names of files that were modified in the merge request // relative to the repo root, e.g. parent/child/file.txt. -func (g *AzureDevopsClient) GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) { +func (g *AzureDevopsClient) GetModifiedFiles(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) { var files []string owner, project, repoName := SplitAzureDevopsRepoFullName(repo.FullName) @@ -95,7 +96,7 @@ func (g *AzureDevopsClient) GetModifiedFiles(repo models.Repo, pull models.PullR // // If comment length is greater than the max comment length we split into // multiple comments. 
-func (g *AzureDevopsClient) CreateComment(repo models.Repo, pullNum int, comment string, command string) error { //nolint: revive +func (g *AzureDevopsClient) CreateComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, comment string, command string) error { //nolint: revive sepEnd := "\n```\n</details>" +
"\n<br>\n\n**Warning**: Output length greater than max comment size. Continued in next comment." sepStart := "Continued from previous comment.\n<details><summary>Show Output</summary>\n\n" +
@@ -106,7 +107,7 @@ func (g *AzureDevopsClient) CreateComment(repo models.Repo, pullNum int, comment // or tested limit in Azure DevOps. const maxCommentLength = 150000 - comments := common.SplitComment(comment, maxCommentLength, sepEnd, sepStart) + comments := common.SplitComment(comment, maxCommentLength, sepEnd, sepStart, 0, "") owner, project, repoName := SplitAzureDevopsRepoFullName(repo.FullName) for i := range comments { @@ -130,17 +131,17 @@ func (g *AzureDevopsClient) CreateComment(repo models.Repo, pullNum int, comment return nil } -func (g *AzureDevopsClient) ReactToComment(repo models.Repo, pullNum int, commentID int64, reaction string) error { //nolint: revive +func (g *AzureDevopsClient) ReactToComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, commentID int64, reaction string) error { //nolint: revive return nil } -func (g *AzureDevopsClient) HidePrevCommandComments(repo models.Repo, pullNum int, command string, dir string) error { //nolint: revive +func (g *AzureDevopsClient) HidePrevCommandComments(logger logging.SimpleLogging, repo models.Repo, pullNum int, command string, dir string) error { //nolint: revive return nil } // PullIsApproved returns true if the merge request was approved by another reviewer. // https://docs.microsoft.com/en-us/azure/devops/repos/git/branch-policies?view=azure-devops#require-a-minimum-number-of-reviewers -func (g *AzureDevopsClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (approvalStatus models.ApprovalStatus, err error) { +func (g *AzureDevopsClient) PullIsApproved(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) (approvalStatus models.ApprovalStatus, err error) { owner, project, repoName := SplitAzureDevopsRepoFullName(repo.FullName) opts := azuredevops.PullRequestGetOptions{ @@ -176,7 +177,7 @@ func (g *AzureDevopsClient) DiscardReviews(repo models.Repo, pull models.PullReq } // PullIsMergeable returns true if the merge request can be merged. -func (g *AzureDevopsClient) PullIsMergeable(repo models.Repo, pull models.PullRequest, vcsstatusname string) (bool, error) { //nolint: revive +func (g *AzureDevopsClient) PullIsMergeable(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, _ string, _ []string) (bool, error) { //nolint: revive owner, project, repoName := SplitAzureDevopsRepoFullName(repo.FullName) opts := azuredevops.PullRequestGetOptions{IncludeWorkItemRefs: true} @@ -227,7 +228,7 @@ func (g *AzureDevopsClient) PullIsMergeable(repo models.Repo, pull models.PullRe } // GetPullRequest returns the pull request. -func (g *AzureDevopsClient) GetPullRequest(repo models.Repo, num int) (*azuredevops.GitPullRequest, error) { +func (g *AzureDevopsClient) GetPullRequest(logger logging.SimpleLogging, repo models.Repo, num int) (*azuredevops.GitPullRequest, error) { opts := azuredevops.PullRequestGetOptions{ IncludeWorkItemRefs: true, } @@ -237,7 +238,7 @@ func (g *AzureDevopsClient) GetPullRequest(repo models.Repo, num int) (*azuredev } // UpdateStatus updates the build status of a commit. 
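(Editor's note: one detail that is easy to miss above is that common.SplitComment gained two trailing parameters, passed here as 0 and "". Judging by the call site, they look like a cap on the number of continuation comments and a header used when older output is truncated; zero and empty would preserve the old unbounded behavior. A rough sketch under those assumptions, not the library's actual implementation.)

```go
package main

import (
	"fmt"
	"strings"
)

// splitComment chunks comment so each piece plus its separator markers fits
// under max. maxComments == 0 is treated as unlimited; when the cap is hit,
// the oldest chunks are dropped and truncationHeader prefixes the first kept
// one. Assumed semantics, for illustration only.
func splitComment(comment string, max int, sepEnd, sepStart string, maxComments int, truncationHeader string) []string {
	if len(comment) <= max {
		return []string{comment}
	}
	body := max - len(sepEnd) - len(sepStart)
	if body <= 0 {
		body = max
	}
	var chunks []string
	for len(comment) > 0 {
		n := min(body, len(comment))
		chunks = append(chunks, comment[:n])
		comment = comment[n:]
	}
	if maxComments > 0 && len(chunks) > maxComments {
		chunks = chunks[len(chunks)-maxComments:]
		chunks[0] = truncationHeader + chunks[0]
	}
	out := make([]string, len(chunks))
	for i, c := range chunks {
		if i > 0 {
			c = sepStart + c // every continuation opens with the start marker
		}
		if i < len(chunks)-1 {
			c = c + sepEnd // every non-final piece closes with the end marker
		}
		out[i] = c
	}
	return out
}

func main() {
	parts := splitComment(strings.Repeat("x", 20), 12, "|E", "S|", 2, "[truncated]\n")
	fmt.Println(len(parts), parts)
}
```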
-func (g *AzureDevopsClient) UpdateStatus(repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error { +func (g *AzureDevopsClient) UpdateStatus(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error { adState := azuredevops.GitError.String() switch state { case models.PendingCommitStatus: @@ -303,7 +304,7 @@ func (g *AzureDevopsClient) UpdateStatus(repo models.Repo, pull models.PullReque // If the user has set a branch policy that disallows no fast-forward, the merge will fail // until we handle branch policies // https://docs.microsoft.com/en-us/azure/devops/repos/git/branch-policies?view=azure-devops -func (g *AzureDevopsClient) MergePull(pull models.PullRequest, pullOptions models.PullRequestOptions) error { +func (g *AzureDevopsClient) MergePull(logger logging.SimpleLogging, pull models.PullRequest, pullOptions models.PullRequestOptions) error { owner, project, repoName := SplitAzureDevopsRepoFullName(pull.BaseRepo.FullName) descriptor := "Atlantis Terraform Pull Request Automation" @@ -315,7 +316,7 @@ func (g *AzureDevopsClient) MergePull(pull models.PullRequest, pullOptions model return fmt.Errorf("the user %s is not found in the organization %s", g.UserName, owner) } - imageURL := "https://github.com/runatlantis/atlantis/raw/main/runatlantis.io/.vuepress/public/hero.png" + imageURL := "https://raw.githubusercontent.com/runatlantis/atlantis/main/runatlantis.io/public/hero.png" id := azuredevops.IdentityRef{ Descriptor: &descriptor, ID: userID, @@ -398,7 +399,7 @@ func (g *AzureDevopsClient) SupportsSingleFileDownload(repo models.Repo) bool { return false } -func (g *AzureDevopsClient) GetFileContent(pull models.PullRequest, fileName string) (bool, []byte, error) { //nolint: revive +func (g *AzureDevopsClient) GetFileContent(_ logging.SimpleLogging, pull models.PullRequest, fileName string) (bool, []byte, error) { //nolint: revive return false, []byte{}, fmt.Errorf("not implemented") } @@ -421,10 +422,10 @@ func GitStatusContextFromSrc(src string) *azuredevops.GitStatusContext { } } -func (g *AzureDevopsClient) GetCloneURL(VCSHostType models.VCSHostType, repo string) (string, error) { //nolint: revive +func (g *AzureDevopsClient) GetCloneURL(_ logging.SimpleLogging, VCSHostType models.VCSHostType, repo string) (string, error) { //nolint: revive return "", fmt.Errorf("not yet implemented") } -func (g *AzureDevopsClient) GetPullLabels(_ models.Repo, _ models.PullRequest) ([]string, error) { +func (g *AzureDevopsClient) GetPullLabels(_ logging.SimpleLogging, _ models.Repo, _ models.PullRequest) ([]string, error) { return nil, fmt.Errorf("not yet implemented") } diff --git a/server/events/vcs/azuredevops_client_test.go b/server/events/vcs/azuredevops_client_test.go index 1c6e142298..a7095262d2 100644 --- a/server/events/vcs/azuredevops_client_test.go +++ b/server/events/vcs/azuredevops_client_test.go @@ -15,10 +15,12 @@ import ( "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/vcs" "github.com/runatlantis/atlantis/server/events/vcs/testdata" + "github.com/runatlantis/atlantis/server/logging" . 
"github.com/runatlantis/atlantis/testing" ) func TestAzureDevopsClient_MergePull(t *testing.T) { + logger := logging.NewNoopLogger(t) cases := []struct { description string response string @@ -121,16 +123,18 @@ func TestAzureDevopsClient_MergePull(t *testing.T) { } fmt.Printf("Successfully merged pull request: %+v\n", merge) - err = client.MergePull(models.PullRequest{ - Num: 22, - BaseRepo: models.Repo{ - FullName: "owner/project/repo", - Owner: "owner", - Name: "repo", - }, - }, models.PullRequestOptions{ - DeleteSourceBranchOnMerge: false, - }) + err = client.MergePull( + logger, + models.PullRequest{ + Num: 22, + BaseRepo: models.Repo{ + FullName: "owner/project/repo", + Owner: "owner", + Name: "repo", + }, + }, models.PullRequestOptions{ + DeleteSourceBranchOnMerge: false, + }) if c.expErr == "" { Ok(t, err) } else { @@ -142,6 +146,7 @@ func TestAzureDevopsClient_MergePull(t *testing.T) { } func TestAzureDevopsClient_UpdateStatus(t *testing.T) { + logger := logging.NewNoopLogger(t) cases := []struct { status models.CommitStatus expState string @@ -223,11 +228,14 @@ func TestAzureDevopsClient_UpdateStatus(t *testing.T) { Owner: "owner", Name: "repo", } - err = client.UpdateStatus(repo, models.PullRequest{ - Num: 22, - BaseRepo: repo, - HeadCommit: "sha", - }, c.status, "src", "description", "https://google.com") + err = client.UpdateStatus( + logger, + repo, + models.PullRequest{ + Num: 22, + BaseRepo: repo, + HeadCommit: "sha", + }, c.status, "src", "description", "https://google.com") Ok(t, err) Assert(t, gotRequest, "expected to get the request") }) @@ -237,6 +245,7 @@ func TestAzureDevopsClient_UpdateStatus(t *testing.T) { // GetModifiedFiles should make multiple requests if more than one page // and concat results. func TestAzureDevopsClient_GetModifiedFiles(t *testing.T) { + logger := logging.NewNoopLogger(t) itemRespTemplate := `{ "changes": [ { @@ -281,24 +290,27 @@ func TestAzureDevopsClient_GetModifiedFiles(t *testing.T) { Ok(t, err) defer disableSSLVerification()() - files, err := client.GetModifiedFiles(models.Repo{ - FullName: "owner/project/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.AzureDevops, - Hostname: "dev.azure.com", - }, - }, models.PullRequest{ - Num: 1, - }) + files, err := client.GetModifiedFiles( + logger, + models.Repo{ + FullName: "owner/project/repo", + Owner: "owner", + Name: "repo", + CloneURL: "", + SanitizedCloneURL: "", + VCSHost: models.VCSHost{ + Type: models.AzureDevops, + Hostname: "dev.azure.com", + }, + }, models.PullRequest{ + Num: 1, + }) Ok(t, err) Equals(t, []string{"file1.txt", "file2.txt"}, files) } func TestAzureDevopsClient_PullIsMergeable(t *testing.T) { + logger := logging.NewNoopLogger(t) type Policy struct { genre string name string @@ -402,19 +414,21 @@ func TestAzureDevopsClient_PullIsMergeable(t *testing.T) { defer disableSSLVerification()() - actMergeable, err := client.PullIsMergeable(models.Repo{ - FullName: "owner/project/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.AzureDevops, - Hostname: "dev.azure.com", - }, - }, models.PullRequest{ - Num: 1, - }, "atlantis-test") + actMergeable, err := client.PullIsMergeable( + logger, + models.Repo{ + FullName: "owner/project/repo", + Owner: "owner", + Name: "repo", + CloneURL: "", + SanitizedCloneURL: "", + VCSHost: models.VCSHost{ + Type: models.AzureDevops, + Hostname: "dev.azure.com", + }, + }, models.PullRequest{ + Num: 1, + 
}, "atlantis-test", []string{}) Ok(t, err) Equals(t, c.expMergeable, actMergeable) }) @@ -422,6 +436,7 @@ func TestAzureDevopsClient_PullIsMergeable(t *testing.T) { } func TestAzureDevopsClient_PullIsApproved(t *testing.T) { + logger := logging.NewNoopLogger(t) cases := []struct { testName string reviewerUniqueName string @@ -496,19 +511,21 @@ func TestAzureDevopsClient_PullIsApproved(t *testing.T) { defer disableSSLVerification()() - approvalStatus, err := client.PullIsApproved(models.Repo{ - FullName: "owner/project/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.AzureDevops, - Hostname: "dev.azure.com", - }, - }, models.PullRequest{ - Num: 1, - }) + approvalStatus, err := client.PullIsApproved( + logger, + models.Repo{ + FullName: "owner/project/repo", + Owner: "owner", + Name: "repo", + CloneURL: "", + SanitizedCloneURL: "", + VCSHost: models.VCSHost{ + Type: models.AzureDevops, + Hostname: "dev.azure.com", + }, + }, models.PullRequest{ + Num: 1, + }) Ok(t, err) Equals(t, c.expApproved, approvalStatus.IsApproved) }) @@ -516,6 +533,7 @@ func TestAzureDevopsClient_PullIsApproved(t *testing.T) { } func TestAzureDevopsClient_GetPullRequest(t *testing.T) { + logger := logging.NewNoopLogger(t) // Use a real Azure DevOps json response and edit the mergeable_state field. jsBytes, err := os.ReadFile("testdata/azuredevops-pr.json") Ok(t, err) @@ -540,17 +558,19 @@ func TestAzureDevopsClient_GetPullRequest(t *testing.T) { Ok(t, err) defer disableSSLVerification()() - _, err = client.GetPullRequest(models.Repo{ - FullName: "owner/project/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.AzureDevops, - Hostname: "dev.azure.com", - }, - }, 1) + _, err = client.GetPullRequest( + logger, + models.Repo{ + FullName: "owner/project/repo", + Owner: "owner", + Name: "repo", + CloneURL: "", + SanitizedCloneURL: "", + VCSHost: models.VCSHost{ + Type: models.AzureDevops, + Hostname: "dev.azure.com", + }, + }, 1) Ok(t, err) }) } diff --git a/server/events/vcs/bitbucketcloud/client.go b/server/events/vcs/bitbucketcloud/client.go index a9b38b4d09..c9a88f0245 100644 --- a/server/events/vcs/bitbucketcloud/client.go +++ b/server/events/vcs/bitbucketcloud/client.go @@ -11,6 +11,7 @@ import ( validator "github.com/go-playground/validator/v10" "github.com/pkg/errors" "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" ) type Client struct { @@ -40,7 +41,7 @@ func NewClient(httpClient *http.Client, username string, password string, atlant // GetModifiedFiles returns the names of files that were modified in the merge request // relative to the repo root, e.g. parent/child/file.txt. -func (b *Client) GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) { +func (b *Client) GetModifiedFiles(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) { var files []string nextPageURL := fmt.Sprintf("%s/2.0/repositories/%s/pullrequests/%d/diffstat", b.BaseURL, repo.FullName, pull.Num) @@ -85,7 +86,7 @@ func (b *Client) GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([] } // CreateComment creates a comment on the merge request. 
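The pattern running through this whole diff is that every `vcs.Client` method now takes a request-scoped `logging.SimpleLogging` as its first parameter rather than reading a logger stored on the client, which is why each test above now constructs a `NewNoopLogger`. A rough sketch of the shape an implementer of the updated interface provides (`MyClient` is hypothetical, not part of this change):

```go
package example

import (
	"github.com/runatlantis/atlantis/server/events/models"
	"github.com/runatlantis/atlantis/server/logging"
)

// MyClient is a hypothetical implementer of the updated vcs.Client interface.
type MyClient struct{}

// CreateComment shows the new logger-first signature: the request-scoped
// logger is passed in explicitly instead of living on the client struct.
func (c *MyClient) CreateComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, comment string, command string) error {
	logger.Debug("creating %s comment on %s#%d", command, repo.FullName, pullNum)
	// ... VCS-specific API call would go here ...
	return nil
}
```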
-func (b *Client) CreateComment(repo models.Repo, pullNum int, comment string, _ string) error { +func (b *Client) CreateComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, comment string, _ string) error { // NOTE: I tried to find the maximum size of a comment for bitbucket.org but // I got up to 200k chars without issue so for now I'm not going to bother // to detect this. @@ -101,17 +102,17 @@ func (b *Client) CreateComment(repo models.Repo, pullNum int, comment string, _ } // UpdateComment updates the body of a comment on the merge request. -func (b *Client) ReactToComment(_ models.Repo, _ int, _ int64, _ string) error { +func (b *Client) ReactToComment(_ logging.SimpleLogging, _ models.Repo, _ int, _ int64, _ string) error { // TODO: Bitbucket support for reactions return nil } -func (b *Client) HidePrevCommandComments(_ models.Repo, _ int, _ string, _ string) error { +func (b *Client) HidePrevCommandComments(_ logging.SimpleLogging, _ models.Repo, _ int, _ string, _ string) error { return nil } // PullIsApproved returns true if the merge request was approved. -func (b *Client) PullIsApproved(repo models.Repo, pull models.PullRequest) (approvalStatus models.ApprovalStatus, err error) { +func (b *Client) PullIsApproved(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) (approvalStatus models.ApprovalStatus, err error) { path := fmt.Sprintf("%s/2.0/repositories/%s/pullrequests/%d", b.BaseURL, repo.FullName, pull.Num) resp, err := b.makeRequest("GET", path, nil) if err != nil { @@ -138,7 +139,7 @@ func (b *Client) PullIsApproved(repo models.Repo, pull models.PullRequest) (appr } // PullIsMergeable returns true if the merge request has no conflicts and can be merged. -func (b *Client) PullIsMergeable(repo models.Repo, pull models.PullRequest, _ string) (bool, error) { +func (b *Client) PullIsMergeable(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, _ string, _ []string) (bool, error) { nextPageURL := fmt.Sprintf("%s/2.0/repositories/%s/pullrequests/%d/diffstat", b.BaseURL, repo.FullName, pull.Num) // We'll only loop 1000 times as a safety measure. maxLoops := 1000 @@ -169,7 +170,7 @@ func (b *Client) PullIsMergeable(repo models.Repo, pull models.PullRequest, _ st } // UpdateStatus updates the status of a commit. -func (b *Client) UpdateStatus(repo models.Repo, pull models.PullRequest, status models.CommitStatus, src string, description string, url string) error { +func (b *Client) UpdateStatus(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, status models.CommitStatus, src string, description string, url string) error { bbState := "FAILED" switch status { case models.PendingCommitStatus: @@ -207,7 +208,7 @@ func (b *Client) UpdateStatus(repo models.Repo, pull models.PullRequest, status } // MergePull merges the pull request. 
-func (b *Client) MergePull(pull models.PullRequest, _ models.PullRequestOptions) error { +func (b *Client) MergePull(logger logging.SimpleLogging, pull models.PullRequest, _ models.PullRequestOptions) error { path := fmt.Sprintf("%s/2.0/repositories/%s/pullrequests/%d/merge", b.BaseURL, pull.BaseRepo.FullName, pull.Num) _, err := b.makeRequest("POST", path, nil) return err @@ -274,14 +275,14 @@ func (b *Client) SupportsSingleFileDownload(models.Repo) bool { // GetFileContent a repository file content from VCS (which support fetch a single file from repository) // The first return value indicates whether the repo contains a file or not // if BaseRepo had a file, its content will placed on the second return value -func (b *Client) GetFileContent(_ models.PullRequest, _ string) (bool, []byte, error) { +func (b *Client) GetFileContent(_ logging.SimpleLogging, _ models.PullRequest, _ string) (bool, []byte, error) { return false, []byte{}, fmt.Errorf("not implemented") } -func (b *Client) GetCloneURL(_ models.VCSHostType, _ string) (string, error) { +func (b *Client) GetCloneURL(_ logging.SimpleLogging, _ models.VCSHostType, _ string) (string, error) { return "", fmt.Errorf("not yet implemented") } -func (b *Client) GetPullLabels(_ models.Repo, _ models.PullRequest) ([]string, error) { +func (b *Client) GetPullLabels(_ logging.SimpleLogging, _ models.Repo, _ models.PullRequest) ([]string, error) { return nil, fmt.Errorf("not yet implemented") } diff --git a/server/events/vcs/bitbucketcloud/client_test.go b/server/events/vcs/bitbucketcloud/client_test.go index a193dc4743..59108a14f3 100644 --- a/server/events/vcs/bitbucketcloud/client_test.go +++ b/server/events/vcs/bitbucketcloud/client_test.go @@ -10,11 +10,15 @@ import ( "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/vcs/bitbucketcloud" + "github.com/runatlantis/atlantis/server/logging" . "github.com/runatlantis/atlantis/testing" ) +const diffstatURL = "/2.0/repositories/owner/repo/pullrequests/1/diffstat" + // Should follow pagination properly. func TestClient_GetModifiedFilesPagination(t *testing.T) { + logger := logging.NewNoopLogger(t) respTemplate := ` { "pagelen": 1, @@ -54,12 +58,12 @@ func TestClient_GetModifiedFilesPagination(t *testing.T) { testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch r.RequestURI { // The first request should hit this URL. - case "/2.0/repositories/owner/repo/pullrequests/1/diffstat": - resp := firstResp + fmt.Sprintf(`,"next": "%s/2.0/repositories/owner/repo/pullrequests/1/diffstat?page=2"}`, serverURL) + case diffstatURL: + resp := firstResp + fmt.Sprintf(`,"next": "%s%s?page=2"}`, serverURL, diffstatURL) w.Write([]byte(resp)) // nolint: errcheck return // The second should hit this URL. 
- case "/2.0/repositories/owner/repo/pullrequests/1/diffstat?page=2": + case fmt.Sprintf("%s?page=2", diffstatURL): w.Write([]byte(secondResp + "}")) // nolint: errcheck default: t.Errorf("got unexpected request at %q", r.RequestURI) @@ -73,25 +77,28 @@ func TestClient_GetModifiedFilesPagination(t *testing.T) { client := bitbucketcloud.NewClient(http.DefaultClient, "user", "pass", "runatlantis.io") client.BaseURL = testServer.URL - files, err := client.GetModifiedFiles(models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.BitbucketCloud, - Hostname: "bitbucket.org", - }, - }, models.PullRequest{ - Num: 1, - }) + files, err := client.GetModifiedFiles( + logger, + models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + CloneURL: "", + SanitizedCloneURL: "", + VCSHost: models.VCSHost{ + Type: models.BitbucketCloud, + Hostname: "bitbucket.org", + }, + }, models.PullRequest{ + Num: 1, + }) Ok(t, err) Equals(t, []string{"file1.txt", "file2.txt", "file3.txt"}, files) } // If the "old" key in the list of files is nil we shouldn't error. func TestClient_GetModifiedFilesOldNil(t *testing.T) { + logger := logging.NewNoopLogger(t) resp := ` { "pagelen": 500, @@ -120,7 +127,7 @@ func TestClient_GetModifiedFilesOldNil(t *testing.T) { testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch r.RequestURI { // The first request should hit this URL. - case "/2.0/repositories/owner/repo/pullrequests/1/diffstat": + case diffstatURL: w.Write([]byte(resp)) // nolint: errcheck return default: @@ -134,24 +141,27 @@ func TestClient_GetModifiedFilesOldNil(t *testing.T) { client := bitbucketcloud.NewClient(http.DefaultClient, "user", "pass", "runatlantis.io") client.BaseURL = testServer.URL - files, err := client.GetModifiedFiles(models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.BitbucketCloud, - Hostname: "bitbucket.org", - }, - }, models.PullRequest{ - Num: 1, - }) + files, err := client.GetModifiedFiles( + logger, + models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + CloneURL: "", + SanitizedCloneURL: "", + VCSHost: models.VCSHost{ + Type: models.BitbucketCloud, + Hostname: "bitbucket.org", + }, + }, models.PullRequest{ + Num: 1, + }) Ok(t, err) Equals(t, []string{"parent/child/file1.txt"}, files) } func TestClient_PullIsApproved(t *testing.T) { + logger := logging.NewNoopLogger(t) cases := []struct { description string testdata string @@ -202,12 +212,14 @@ func TestClient_PullIsApproved(t *testing.T) { repo, err := models.NewRepo(models.BitbucketServer, "owner/repo", "https://bitbucket.org/owner/repo.git", "user", "token") Ok(t, err) - approvalStatus, err := client.PullIsApproved(repo, models.PullRequest{ - Num: 1, - HeadBranch: "branch", - Author: "author", - BaseRepo: repo, - }) + approvalStatus, err := client.PullIsApproved( + logger, + repo, models.PullRequest{ + Num: 1, + HeadBranch: "branch", + Author: "author", + BaseRepo: repo, + }) Ok(t, err) Equals(t, c.exp, approvalStatus.IsApproved) }) @@ -215,6 +227,7 @@ func TestClient_PullIsApproved(t *testing.T) { } func TestClient_PullIsMergeable(t *testing.T) { + logger := logging.NewNoopLogger(t) cases := map[string]struct { DiffStat string ExpMergeable bool @@ -311,7 +324,7 @@ func TestClient_PullIsMergeable(t *testing.T) { t.Run(name, func(t *testing.T) { testServer 
:= httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch r.RequestURI { - case "/2.0/repositories/owner/repo/pullrequests/1/diffstat": + case diffstatURL: w.Write([]byte(c.DiffStat)) // nolint: errcheck return default: @@ -325,19 +338,21 @@ func TestClient_PullIsMergeable(t *testing.T) { client := bitbucketcloud.NewClient(http.DefaultClient, "user", "pass", "runatlantis.io") client.BaseURL = testServer.URL - actMergeable, err := client.PullIsMergeable(models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.BitbucketCloud, - Hostname: "bitbucket.org", - }, - }, models.PullRequest{ - Num: 1, - }, "atlantis-test") + actMergeable, err := client.PullIsMergeable( + logger, + models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + CloneURL: "", + SanitizedCloneURL: "", + VCSHost: models.VCSHost{ + Type: models.BitbucketCloud, + Hostname: "bitbucket.org", + }, + }, models.PullRequest{ + Num: 1, + }, "atlantis-test", []string{}) Ok(t, err) Equals(t, c.ExpMergeable, actMergeable) }) diff --git a/server/events/vcs/bitbucketserver/client.go b/server/events/vcs/bitbucketserver/client.go index b6c75f3197..83956db629 100644 --- a/server/events/vcs/bitbucketserver/client.go +++ b/server/events/vcs/bitbucketserver/client.go @@ -11,6 +11,7 @@ import ( "strings" "github.com/runatlantis/atlantis/server/events/vcs/common" + "github.com/runatlantis/atlantis/server/logging" validator "github.com/go-playground/validator/v10" "github.com/pkg/errors" @@ -64,7 +65,7 @@ func NewClient(httpClient *http.Client, username string, password string, baseUR // GetModifiedFiles returns the names of files that were modified in the merge request // relative to the repo root, e.g. parent/child/file.txt. -func (b *Client) GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) { +func (b *Client) GetModifiedFiles(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) { var files []string projectKey, err := b.GetProjectKey(repo.Name, repo.SanitizedCloneURL) @@ -133,10 +134,10 @@ func (b *Client) GetProjectKey(repoName string, cloneURL string) (string, error) // CreateComment creates a comment on the merge request. It will write multiple // comments if a single comment is too long. -func (b *Client) CreateComment(repo models.Repo, pullNum int, comment string, _ string) error { +func (b *Client) CreateComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, comment string, _ string) error { sepEnd := "\n```\n**Warning**: Output length greater than max comment size. Continued in next comment." 
sepStart := "Continued from previous comment.\n```diff\n" - comments := common.SplitComment(comment, maxCommentLength, sepEnd, sepStart) + comments := common.SplitComment(comment, maxCommentLength, sepEnd, sepStart, 0, "") for _, c := range comments { if err := b.postComment(repo, pullNum, c); err != nil { return err @@ -145,11 +146,11 @@ func (b *Client) CreateComment(repo models.Repo, pullNum int, comment string, _ return nil } -func (b *Client) ReactToComment(_ models.Repo, _ int, _ int64, _ string) error { +func (b *Client) ReactToComment(_ logging.SimpleLogging, _ models.Repo, _ int, _ int64, _ string) error { return nil } -func (b *Client) HidePrevCommandComments(_ models.Repo, _ int, _ string, _ string) error { +func (b *Client) HidePrevCommandComments(_ logging.SimpleLogging, _ models.Repo, _ int, _ string, _ string) error { return nil } @@ -169,7 +170,7 @@ func (b *Client) postComment(repo models.Repo, pullNum int, comment string) erro } // PullIsApproved returns true if the merge request was approved. -func (b *Client) PullIsApproved(repo models.Repo, pull models.PullRequest) (approvalStatus models.ApprovalStatus, err error) { +func (b *Client) PullIsApproved(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) (approvalStatus models.ApprovalStatus, err error) { projectKey, err := b.GetProjectKey(repo.Name, repo.SanitizedCloneURL) if err != nil { return approvalStatus, err @@ -202,7 +203,7 @@ func (b *Client) DiscardReviews(_ models.Repo, _ models.PullRequest) error { } // PullIsMergeable returns true if the merge request has no conflicts and can be merged. -func (b *Client) PullIsMergeable(repo models.Repo, pull models.PullRequest, _ string) (bool, error) { +func (b *Client) PullIsMergeable(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, _ string, _ []string) (bool, error) { projectKey, err := b.GetProjectKey(repo.Name, repo.SanitizedCloneURL) if err != nil { return false, err @@ -226,7 +227,7 @@ func (b *Client) PullIsMergeable(repo models.Repo, pull models.PullRequest, _ st } // UpdateStatus updates the status of a commit. -func (b *Client) UpdateStatus(_ models.Repo, pull models.PullRequest, status models.CommitStatus, src string, description string, url string) error { +func (b *Client) UpdateStatus(logger logging.SimpleLogging, _ models.Repo, pull models.PullRequest, status models.CommitStatus, src string, description string, url string) error { bbState := "FAILED" switch status { case models.PendingCommitStatus: @@ -259,7 +260,7 @@ func (b *Client) UpdateStatus(_ models.Repo, pull models.PullRequest, status mod } // MergePull merges the pull request. 
-func (b *Client) MergePull(pull models.PullRequest, pullOptions models.PullRequestOptions) error { +func (b *Client) MergePull(logger logging.SimpleLogging, pull models.PullRequest, pullOptions models.PullRequestOptions) error { projectKey, err := b.GetProjectKey(pull.BaseRepo.Name, pull.BaseRepo.SanitizedCloneURL) if err != nil { return err @@ -358,14 +359,14 @@ func (b *Client) SupportsSingleFileDownload(_ models.Repo) bool { // GetFileContent a repository file content from VCS (which support fetch a single file from repository) // The first return value indicates whether the repo contains a file or not // if BaseRepo had a file, its content will placed on the second return value -func (b *Client) GetFileContent(_ models.PullRequest, _ string) (bool, []byte, error) { +func (b *Client) GetFileContent(_ logging.SimpleLogging, _ models.PullRequest, _ string) (bool, []byte, error) { return false, []byte{}, fmt.Errorf("not implemented") } -func (b *Client) GetCloneURL(_ models.VCSHostType, _ string) (string, error) { +func (b *Client) GetCloneURL(_ logging.SimpleLogging, _ models.VCSHostType, _ string) (string, error) { return "", fmt.Errorf("not yet implemented") } -func (b *Client) GetPullLabels(_ models.Repo, _ models.PullRequest) ([]string, error) { +func (b *Client) GetPullLabels(_ logging.SimpleLogging, _ models.Repo, _ models.PullRequest) ([]string, error) { return nil, fmt.Errorf("not yet implemented") } diff --git a/server/events/vcs/bitbucketserver/client_test.go b/server/events/vcs/bitbucketserver/client_test.go index 73aa8b0962..4827b76ed1 100644 --- a/server/events/vcs/bitbucketserver/client_test.go +++ b/server/events/vcs/bitbucketserver/client_test.go @@ -13,6 +13,7 @@ import ( "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/vcs/bitbucketserver" + "github.com/runatlantis/atlantis/server/logging" . "github.com/runatlantis/atlantis/testing" ) @@ -72,6 +73,7 @@ func TestClient_BasePath(t *testing.T) { // Should follow pagination properly. func TestClient_GetModifiedFilesPagination(t *testing.T) { + logger := logging.NewNoopLogger(t) respTemplate := ` { "values": [ @@ -120,18 +122,20 @@ func TestClient_GetModifiedFilesPagination(t *testing.T) { client, err := bitbucketserver.NewClient(http.DefaultClient, "user", "pass", serverURL, "runatlantis.io") Ok(t, err) - files, err := client.GetModifiedFiles(models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - SanitizedCloneURL: fmt.Sprintf("%s/scm/ow/repo.git", serverURL), - VCSHost: models.VCSHost{ - Type: models.BitbucketCloud, - Hostname: "bitbucket.org", - }, - }, models.PullRequest{ - Num: 1, - }) + files, err := client.GetModifiedFiles( + logger, + models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + SanitizedCloneURL: fmt.Sprintf("%s/scm/ow/repo.git", serverURL), + VCSHost: models.VCSHost{ + Type: models.BitbucketCloud, + Hostname: "bitbucket.org", + }, + }, models.PullRequest{ + Num: 1, + }) Ok(t, err) Equals(t, []string{"file1.txt", "file2.txt", "file3.txt"}, files) } @@ -139,6 +143,7 @@ func TestClient_GetModifiedFilesPagination(t *testing.T) { // Test that we use the correct version parameter in our call to merge the pull // request. 
func TestClient_MergePull(t *testing.T) { + logger := logging.NewNoopLogger(t) pullRequest, err := os.ReadFile(filepath.Join("testdata", "pull-request.json")) Ok(t, err) testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { @@ -161,33 +166,36 @@ func TestClient_MergePull(t *testing.T) { client, err := bitbucketserver.NewClient(http.DefaultClient, "user", "pass", testServer.URL, "runatlantis.io") Ok(t, err) - err = client.MergePull(models.PullRequest{ - Num: 1, - HeadCommit: "", - URL: "", - HeadBranch: "", - BaseBranch: "", - Author: "", - State: 0, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - SanitizedCloneURL: fmt.Sprintf("%s/scm/ow/repo.git", testServer.URL), - VCSHost: models.VCSHost{ - Type: models.BitbucketCloud, - Hostname: "bitbucket.org", + err = client.MergePull( + logger, + models.PullRequest{ + Num: 1, + HeadCommit: "", + URL: "", + HeadBranch: "", + BaseBranch: "", + Author: "", + State: 0, + BaseRepo: models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + SanitizedCloneURL: fmt.Sprintf("%s/scm/ow/repo.git", testServer.URL), + VCSHost: models.VCSHost{ + Type: models.BitbucketCloud, + Hostname: "bitbucket.org", + }, }, - }, - }, models.PullRequestOptions{ - DeleteSourceBranchOnMerge: false, - }) + }, models.PullRequestOptions{ + DeleteSourceBranchOnMerge: false, + }) Ok(t, err) } // Test that we delete the source branch in our call to merge the pull // request. func TestClient_MergePullDeleteSourceBranch(t *testing.T) { + logger := logging.NewNoopLogger(t) pullRequest, err := os.ReadFile(filepath.Join("testdata", "pull-request.json")) Ok(t, err) testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { @@ -220,27 +228,31 @@ func TestClient_MergePullDeleteSourceBranch(t *testing.T) { client, err := bitbucketserver.NewClient(http.DefaultClient, "user", "pass", testServer.URL, "runatlantis.io") Ok(t, err) - err = client.MergePull(models.PullRequest{ - Num: 1, - HeadCommit: "", - URL: "", - HeadBranch: "foo", - BaseBranch: "", - Author: "", - State: 0, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - SanitizedCloneURL: fmt.Sprintf("%s/scm/ow/repo.git", testServer.URL), - VCSHost: models.VCSHost{ - Type: models.BitbucketServer, - Hostname: "bitbucket.org", + err = client.MergePull( + logger, + models.PullRequest{ + Num: 1, + HeadCommit: "", + URL: "", + HeadBranch: "foo", + BaseBranch: "", + Author: "", + State: 0, + BaseRepo: models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + SanitizedCloneURL: fmt.Sprintf("%s/scm/ow/repo.git", testServer.URL), + VCSHost: models.VCSHost{ + Type: models.BitbucketServer, + Hostname: "bitbucket.org", + }, }, }, - }, models.PullRequestOptions{ - DeleteSourceBranchOnMerge: true, - }) + models.PullRequestOptions{ + DeleteSourceBranchOnMerge: true, + }, + ) Ok(t, err) } diff --git a/server/events/vcs/client.go b/server/events/vcs/client.go index 9bcf972f6a..9e32981a82 100644 --- a/server/events/vcs/client.go +++ b/server/events/vcs/client.go @@ -15,6 +15,7 @@ package vcs import ( "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" ) //go:generate pegomock generate --package mocks -o mocks/mock_client.go github.com/runatlantis/atlantis/server/events/vcs Client @@ -23,13 +24,13 @@ import ( type Client interface { // GetModifiedFiles returns the names of files that were modified in the merge request // 
relative to the repo root, e.g. parent/child/file.txt. - GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) - CreateComment(repo models.Repo, pullNum int, comment string, command string) error + GetModifiedFiles(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) + CreateComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, comment string, command string) error - ReactToComment(repo models.Repo, pullNum int, commentID int64, reaction string) error - HidePrevCommandComments(repo models.Repo, pullNum int, command string, dir string) error - PullIsApproved(repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) - PullIsMergeable(repo models.Repo, pull models.PullRequest, vcsstatusname string) (bool, error) + ReactToComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, commentID int64, reaction string) error + HidePrevCommandComments(logger logging.SimpleLogging, repo models.Repo, pullNum int, command string, dir string) error + PullIsApproved(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) + PullIsMergeable(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, vcsstatusname string, ignoreVCSStatusNames []string) (bool, error) // UpdateStatus updates the commit status to state for pull. src is the // source of this status. This should be relatively static across runs, // ex. atlantis/plan or atlantis/apply. @@ -37,19 +38,19 @@ type Client interface { // change across runs. // url is an optional link that users should click on for more information // about this status. - UpdateStatus(repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error + UpdateStatus(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error DiscardReviews(repo models.Repo, pull models.PullRequest) error - MergePull(pull models.PullRequest, pullOptions models.PullRequestOptions) error + MergePull(logger logging.SimpleLogging, pull models.PullRequest, pullOptions models.PullRequestOptions) error MarkdownPullLink(pull models.PullRequest) (string, error) GetTeamNamesForUser(repo models.Repo, user models.User) ([]string, error) // GetFileContent a repository file content from VCS (which support fetch a single file from repository) // The first return value indicates whether the repo contains a file or not // if BaseRepo had a file, its content will placed on the second return value - GetFileContent(pull models.PullRequest, fileName string) (bool, []byte, error) + GetFileContent(logger logging.SimpleLogging, pull models.PullRequest, fileName string) (bool, []byte, error) SupportsSingleFileDownload(repo models.Repo) bool - GetCloneURL(VCSHostType models.VCSHostType, repo string) (string, error) + GetCloneURL(logger logging.SimpleLogging, VCSHostType models.VCSHostType, repo string) (string, error) // GetPullLabels returns the labels of a pull request - GetPullLabels(repo models.Repo, pull models.PullRequest) ([]string, error) + GetPullLabels(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) } diff --git a/server/events/vcs/common/common.go b/server/events/vcs/common/common.go index bb5004ed08..b7c1028ac1 100644 --- a/server/events/vcs/common/common.go +++ b/server/events/vcs/common/common.go @@ -12,34 +12,45 @@ func AutomergeCommitMsg(pullNum int) 
string { return fmt.Sprintf("[Atlantis] Automatically merging after successful apply: PR #%d", pullNum) } -// SplitComment splits comment into a slice of comments that are under maxSize. -// It appends sepEnd to all comments that have a following comment. -// It prepends sepStart to all comments that have a preceding comment. -func SplitComment(comment string, maxSize int, sepEnd string, sepStart string) []string { +/* +SplitComment splits comment into a slice of comments that are under maxSize. +- It appends sepEnd to all comments that have a following comment. +- It prepends sepStart to all comments that have a preceding comment. +- If maxCommentsPerCommand is non-zero, it never returns more than maxCommentsPerCommand +comments, and it truncates the beginning of the comment to preserve the end of the comment string, +which usually contains more important information, such as warnings, errors, and the plan summary. +- SplitComment appends the truncationHeader to the first comment if it would have produced more comments. +*/ +func SplitComment(comment string, maxSize int, sepEnd string, sepStart string, maxCommentsPerCommand int, truncationHeader string) []string { if len(comment) <= maxSize { return []string{comment} } - maxWithSep := maxSize - len(sepEnd) - len(sepStart) + // No comment contains both sepEnd and truncationHeader, so we only have to count their max. + maxWithSep := maxSize - max(len(sepEnd), len(truncationHeader)) - len(sepStart) var comments []string - numComments := int(math.Ceil(float64(len(comment)) / float64(maxWithSep))) - for i := 0; i < numComments; i++ { - upTo := min(len(comment), (i+1)*maxWithSep) - portion := comment[i*maxWithSep : upTo] - if i < numComments-1 { - portion += sepEnd - } - if i > 0 { + numPotentialComments := int(math.Ceil(float64(len(comment)) / float64(maxWithSep))) + var numComments int + if maxCommentsPerCommand == 0 { + numComments = numPotentialComments + } else { + numComments = min(numPotentialComments, maxCommentsPerCommand) + } + isTruncated := numComments < numPotentialComments + upTo := len(comment) + for len(comments) < numComments { + downFrom := max(0, upTo-maxWithSep) + portion := comment[downFrom:upTo] + if len(comments)+1 != numComments { portion = sepStart + portion + } else if len(comments)+1 == numComments && isTruncated { + portion = truncationHeader + portion + } + if len(comments) != 0 { + portion = portion + sepEnd } - comments = append(comments, portion) + comments = append([]string{portion}, comments...) + upTo = downFrom } return comments } - -func min(a, b int) int { - if a < b { - return a - } - return b -} diff --git a/server/events/vcs/common/common_test.go b/server/events/vcs/common/common_test.go index 246bd49855..1f9d8e9d00 100644 --- a/server/events/vcs/common/common_test.go +++ b/server/events/vcs/common/common_test.go @@ -24,7 +24,7 @@ import ( // If under the maximum number of chars, we shouldn't split the comments. 
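Before the updated tests below: the reworked `SplitComment` builds comments back-to-front, so when a cap applies it discards the oldest chunks and keeps the tail of the output. A small runnable sketch of the new semantics (sizes and separators are arbitrary):

```go
package main

import (
	"fmt"
	"strings"

	"github.com/runatlantis/atlantis/server/events/vcs/common"
)

func main() {
	comment := strings.Repeat("a", 1000)

	// maxCommentsPerCommand == 0 keeps the old behavior: split as many
	// times as needed and drop nothing.
	parts := common.SplitComment(comment, 300, "-sepEnd", "-sepStart", 0, "")
	fmt.Println(len(parts)) // 4

	// With a cap of 2, the oldest chunks are discarded and the first
	// surviving comment is prefixed with the truncation header.
	parts = common.SplitComment(comment, 300, "-sepEnd", "-sepStart", 2, "truncated-")
	fmt.Println(len(parts))                                // 2
	fmt.Println(strings.HasPrefix(parts[0], "truncated-")) // true
	fmt.Println(strings.HasSuffix(parts[0], "-sepEnd"))    // true
}
```

The design choice favors the end of the output because that is where the more important information lands: warnings, errors, and the plan summary.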
func TestSplitComment_UnderMax(t *testing.T) { comment := "comment under max size" - split := common.SplitComment(comment, len(comment)+1, "sepEnd", "sepStart") + split := common.SplitComment(comment, len(comment)+1, "sepEnd", "sepStart", 0, "") Equals(t, []string{comment}, split) } @@ -34,11 +34,11 @@ func TestSplitComment_TwoComments(t *testing.T) { comment := strings.Repeat("a", 1000) sepEnd := "-sepEnd" sepStart := "-sepStart" - split := common.SplitComment(comment, len(comment)-1, sepEnd, sepStart) + split := common.SplitComment(comment, len(comment)-1, sepEnd, sepStart, 0, "") expCommentLen := len(comment) - len(sepEnd) - len(sepStart) - 1 - expFirstComment := comment[:expCommentLen] - expSecondComment := comment[expCommentLen:] + expFirstComment := comment[:len(comment)-expCommentLen] + expSecondComment := comment[len(comment)-expCommentLen:] Equals(t, 2, len(split)) Equals(t, expFirstComment+sepEnd, split[0]) Equals(t, sepStart+expSecondComment, split[1]) @@ -51,14 +51,31 @@ func TestSplitComment_FourComments(t *testing.T) { sepEnd := "-sepEnd" sepStart := "-sepStart" max := (len(comment) / 4) + len(sepEnd) + len(sepStart) - split := common.SplitComment(comment, max, sepEnd, sepStart) + split := common.SplitComment(comment, max, sepEnd, sepStart, 0, "") expMax := len(comment) / 4 Equals(t, []string{ - comment[:expMax] + sepEnd, - sepStart + comment[expMax:expMax*2] + sepEnd, - sepStart + comment[expMax*2:expMax*3] + sepEnd, - sepStart + comment[expMax*3:]}, split) + comment[:len(comment)-expMax*3] + sepEnd, + sepStart + comment[len(comment)-expMax*3:len(comment)-expMax*2] + sepEnd, + sepStart + comment[len(comment)-expMax*2:len(comment)-expMax] + sepEnd, + sepStart + comment[len(comment)-expMax:]}, split) +} + +func TestSplitComment_Limited(t *testing.T) { + comment := strings.Repeat("a", 1000) + sepEnd := "-sepEnd" + sepStart := "-sepStart" + truncationHeader := "truncated-" + max := (len(comment) / 8) + max(len(sepEnd), len(truncationHeader)) + len(sepStart) + split := common.SplitComment(comment, max, sepEnd, sepStart, 5, truncationHeader) + + expMax := len(comment) / 8 + Equals(t, []string{ + truncationHeader + comment[len(comment)-expMax*5:len(comment)-expMax*4] + sepEnd, + sepStart + comment[len(comment)-expMax*4:len(comment)-expMax*3] + sepEnd, + sepStart + comment[len(comment)-expMax*3:len(comment)-expMax*2] + sepEnd, + sepStart + comment[len(comment)-expMax*2:len(comment)-expMax] + sepEnd, + sepStart + comment[len(comment)-expMax:]}, split) } func TestAutomergeCommitMsg(t *testing.T) { diff --git a/server/events/vcs/git_cred_writer.go b/server/events/vcs/git_cred_writer.go index eca5dc00d7..f877abcfdf 100644 --- a/server/events/vcs/git_cred_writer.go +++ b/server/events/vcs/git_cred_writer.go @@ -47,7 +47,7 @@ func WriteGitCreds(gitUser string, gitToken string, gitHostname string, home str if err := fileLineReplace(config, gitUser, gitHostname, credsFile); err != nil { return errors.Wrap(err, "replacing git credentials line for github app") } - logger.Info("updated git app credentials in %s", credsFile) + logger.Info("updated git credentials in %s", credsFile) } else { if err := fileAppend(config, credsFile); err != nil { return err diff --git a/server/events/vcs/gitea/client.go b/server/events/vcs/gitea/client.go new file mode 100644 index 0000000000..9867d4c8f9 --- /dev/null +++ b/server/events/vcs/gitea/client.go @@ -0,0 +1,517 @@ +// Copyright 2024 Martijn van der Kleijn & Florian Beisel +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you 
may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gitea
+
+import (
+	"context"
+	"encoding/base64"
+	"fmt"
+	"strings"
+	"time"
+
+	"code.gitea.io/sdk/gitea"
+	"github.com/pkg/errors"
+	"github.com/runatlantis/atlantis/server/events/models"
+	"github.com/runatlantis/atlantis/server/logging"
+)
+
+// Emergency break for Gitea pagination (just in case).
+// Set to 500 to prevent runaway pagination; the value is deliberately
+// high, and the exact number is otherwise arbitrary.
+const giteaPaginationEBreak = 500
+
+type GiteaClient struct {
+	giteaClient *gitea.Client
+	username    string
+	token       string
+	pageSize    int
+	ctx         context.Context
+}
+
+type GiteaPRReviewSummary struct {
+	Reviews []GiteaReview
+}
+
+type GiteaReview struct {
+	ID          int64
+	Body        string
+	Reviewer    string
+	State       gitea.ReviewStateType // e.g., "APPROVED", "PENDING", "REQUEST_CHANGES"
+	SubmittedAt time.Time
+}
+
+type GiteaPullGetter interface {
+	GetPullRequest(repo models.Repo, pullNum int) (*gitea.PullRequest, error)
+}
+
+// NewClient builds a client that makes API calls to Gitea. baseURL is the
+// API's base URL, ex. https://gitea.example.com, without a trailing slash or
+// API version. username and token are used for token-based authentication,
+// and pagesize sets how many results are requested per page.
+func NewClient(baseURL string, username string, token string, pagesize int, logger logging.SimpleLogging) (*GiteaClient, error) {
+	logger.Debug("Creating new Gitea client for: %s", baseURL)
+
+	giteaClient, err := gitea.NewClient(baseURL,
+		gitea.SetToken(token),
+		gitea.SetUserAgent("atlantis"),
+	)
+
+	if err != nil {
+		return nil, errors.Wrap(err, "creating gitea client")
+	}
+
+	return &GiteaClient{
+		giteaClient: giteaClient,
+		username:    username,
+		token:       token,
+		pageSize:    pagesize,
+		ctx:         context.Background(),
+	}, nil
+}
+
+func (c *GiteaClient) GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) (*gitea.PullRequest, error) {
+	logger.Debug("Getting Gitea pull request %d", pullNum)
+
+	pr, resp, err := c.giteaClient.GetPullRequest(repo.Owner, repo.Name, int64(pullNum))
+
+	if err != nil {
+		logger.Debug("GET /repos/%v/%v/pulls/%d returned: %v", repo.Owner, repo.Name, pullNum, resp.StatusCode)
+		return nil, err
+	}
+
+	return pr, nil
+}
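Putting the constructor and the paginated fetch together, wiring the new client looks roughly like this; a hedged sketch in test form (host, token, and repo values are placeholders, and the test skips itself):

```go
package gitea_test

import (
	"testing"

	"github.com/runatlantis/atlantis/server/events/models"
	"github.com/runatlantis/atlantis/server/events/vcs/gitea"
	"github.com/runatlantis/atlantis/server/logging"
)

// Illustrative wiring only; all endpoint and credential values are fake.
func TestGiteaClientSketch(t *testing.T) {
	t.Skip("illustrative sketch, not a real test")
	logger := logging.NewNoopLogger(t)

	client, err := gitea.NewClient("https://gitea.example.com", "atlantis", "<token>", 30, logger)
	if err != nil {
		t.Fatal(err)
	}

	repo := models.Repo{Owner: "owner", Name: "repo", FullName: "owner/repo"}
	pull := models.PullRequest{Num: 1, BaseRepo: repo}

	// Every client method takes the request-scoped logger first.
	files, err := client.GetModifiedFiles(logger, repo, pull)
	if err != nil {
		t.Fatal(err)
	}
	t.Log(files)
}
```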
+// GetModifiedFiles returns the names of files that were modified in the merge request
+// relative to the repo root, e.g. parent/child/file.txt.
+func (c *GiteaClient) GetModifiedFiles(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) {
+	logger.Debug("Getting modified files for Gitea pull request %d", pull.Num)
+
+	changedFiles := make([]string, 0)
+	page := 0
+	nextPage := 1
+	listOptions := gitea.ListPullRequestFilesOptions{
+		ListOptions: gitea.ListOptions{
+			Page:     1,
+			PageSize: c.pageSize,
+		},
+	}
+
+	for page < nextPage {
+		page++
+		listOptions.ListOptions.Page = page
+		files, resp, err := c.giteaClient.ListPullRequestFiles(repo.Owner, repo.Name, int64(pull.Num), listOptions)
+		if err != nil {
+			logger.Debug("[page %d] GET /repos/%v/%v/pulls/%d/files returned: %v", page, repo.Owner, repo.Name, pull.Num, resp.StatusCode)
+			return nil, err
+		}
+
+		for _, file := range files {
+			changedFiles = append(changedFiles, file.Filename)
+		}
+
+		nextPage = resp.NextPage
+
+		// Emergency break after giteaPaginationEBreak pages
+		if page >= giteaPaginationEBreak {
+			break
+		}
+	}
+
+	return changedFiles, nil
+}
+
+// CreateComment creates a comment on the merge request. As far as we're aware, Gitea has no built-in max comment length right now.
+func (c *GiteaClient) CreateComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, comment string, command string) error {
+	logger.Debug("Creating comment on Gitea pull request %d", pullNum)
+
+	opt := gitea.CreateIssueCommentOption{
+		Body: comment,
+	}
+
+	_, resp, err := c.giteaClient.CreateIssueComment(repo.Owner, repo.Name, int64(pullNum), opt)
+
+	if err != nil {
+		logger.Debug("POST /repos/%v/%v/issues/%d/comments returned: %v", repo.Owner, repo.Name, pullNum, resp.StatusCode)
+		return err
+	}
+
+	logger.Debug("Added comment to Gitea pull request %d: %s", pullNum, comment)
+
+	return nil
+}
+
+// ReactToComment adds a reaction to a comment.
+func (c *GiteaClient) ReactToComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, commentID int64, reaction string) error {
+	logger.Debug("Adding reaction to Gitea pull request comment %d", commentID)
+
+	_, resp, err := c.giteaClient.PostIssueCommentReaction(repo.Owner, repo.Name, commentID, reaction)
+
+	if err != nil {
+		logger.Debug("POST /repos/%v/%v/issues/comments/%d/reactions returned: %v", repo.Owner, repo.Name, commentID, resp.StatusCode)
+		return err
+	}
+
+	return nil
+}
+
+// HidePrevCommandComments hides the previous command comments from the pull
+// request.
+func (c *GiteaClient) HidePrevCommandComments(logger logging.SimpleLogging, repo models.Repo, pullNum int, command string, dir string) error {
+	logger.Debug("Hiding previous command comments on Gitea pull request %d", pullNum)
+
+	var allComments []*gitea.Comment
+
+	nextPage := 1
+	for {
+		// Initialize ListIssueCommentOptions with the current page
+		opts := gitea.ListIssueCommentOptions{
+			ListOptions: gitea.ListOptions{
+				Page:     nextPage,
+				PageSize: c.pageSize,
+			},
+		}
+
+		comments, resp, err := c.giteaClient.ListIssueComments(repo.Owner, repo.Name, int64(pullNum), opts)
+		if err != nil {
+			logger.Debug("GET /repos/%v/%v/issues/%d/comments returned: %v", repo.Owner, repo.Name, pullNum, resp.StatusCode)
+			return err
+		}
+
+		allComments = append(allComments, comments...)
+
+		// Break the loop if there are no more pages to fetch
+		if resp.NextPage == 0 {
+			break
+		}
+		nextPage = resp.NextPage
+	}
+
+	currentUser, resp, err := c.giteaClient.GetMyUserInfo()
+	if err != nil {
+		logger.Debug("GET /user returned: %v", resp.StatusCode)
+		return err
+	}
+
+	summaryHeader := fmt.Sprintf("<details><summary>Superseded Atlantis %s</summary>", command)
+	summaryFooter := "</details>"
+	lineFeed := "\n"
+
+	for _, comment := range allComments {
+		if comment.Poster == nil || comment.Poster.UserName != currentUser.UserName {
+			continue
+		}
+
+		body := strings.Split(comment.Body, "\n")
+		if len(body) == 0 || (!strings.Contains(strings.ToLower(body[0]), strings.ToLower(command)) && dir != "" && !strings.Contains(strings.ToLower(body[0]), strings.ToLower(dir))) {
+			continue
+		}
+
+		supersededComment := summaryHeader + lineFeed + comment.Body + lineFeed + summaryFooter + lineFeed
+
+		logger.Debug("Hiding comment %d", comment.ID)
+		_, _, err := c.giteaClient.EditIssueComment(repo.Owner, repo.Name, comment.ID, gitea.EditIssueCommentOption{
+			Body: supersededComment,
+		})
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// PullIsApproved returns ApprovalStatus with IsApproved set to true if the pull request has a review that approved the PR.
+func (c *GiteaClient) PullIsApproved(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) {
+	logger.Debug("Checking if Gitea pull request %d is approved", pull.Num)
+
+	page := 0
+	nextPage := 1
+
+	approvalStatus := models.ApprovalStatus{
+		IsApproved: false,
+	}
+
+	listOptions := gitea.ListPullReviewsOptions{
+		ListOptions: gitea.ListOptions{
+			Page:     1,
+			PageSize: c.pageSize,
+		},
+	}
+
+	for page < nextPage {
+		page++
+		listOptions.ListOptions.Page = page
+		pullReviews, resp, err := c.giteaClient.ListPullReviews(repo.Owner, repo.Name, int64(pull.Num), listOptions)
+
+		if err != nil {
+			logger.Debug("GET /repos/%v/%v/pulls/%d/reviews returned: %v", repo.Owner, repo.Name, pull.Num, resp.StatusCode)
+			return approvalStatus, err
+		}
+
+		for _, review := range pullReviews {
+			if review.State == gitea.ReviewStateApproved {
+				approvalStatus.IsApproved = true
+				approvalStatus.ApprovedBy = review.Reviewer.UserName
+				approvalStatus.Date = review.Submitted
+
+				return approvalStatus, nil
+			}
+		}
+
+		nextPage = resp.NextPage
+
+		// Emergency break after giteaPaginationEBreak pages
+		if page >= giteaPaginationEBreak {
+			break
+		}
+	}
+
+	return approvalStatus, nil
+}
+
+// PullIsMergeable returns true if the pull request is mergeable
+func (c *GiteaClient) PullIsMergeable(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, _ string, _ []string) (bool, error) {
+	logger.Debug("Checking if Gitea pull request %d is mergeable", pull.Num)
+
+	pullRequest, _, err := c.giteaClient.GetPullRequest(repo.Owner, repo.Name, int64(pull.Num))
+
+	if err != nil {
+		return false, err
+	}
+
+	logger.Debug("Gitea pull request is mergeable: %v (%v)", pullRequest.Mergeable, pull.Num)
+
+	return pullRequest.Mergeable, nil
+}
+
+// UpdateStatus updates the commit status to state for pull. src is the
+// source of this status. This should be relatively static across runs,
+// ex. atlantis/plan or atlantis/apply.
+// description is a description of this particular status update and can
+// change across runs.
+// url is an optional link that users should click on for more information
+// about this status.
+func (c *GiteaClient) UpdateStatus(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error {
+	giteaState := gitea.StatusFailure
+
+	switch state {
+	case models.PendingCommitStatus:
+		giteaState = gitea.StatusPending
+	case models.SuccessCommitStatus:
+		giteaState = gitea.StatusSuccess
+	case models.FailedCommitStatus:
+		giteaState = gitea.StatusFailure
+	}
+
+	logger.Debug("Updating status on Gitea pull request %d for '%s' to '%s'", pull.Num, description, state)
+
+	newStatusOption := gitea.CreateStatusOption{
+		State:       giteaState,
+		TargetURL:   url,
+		Description: description,
+	}
+
+	_, resp, err := c.giteaClient.CreateStatus(repo.Owner, repo.Name, pull.HeadCommit, newStatusOption)
+
+	if err != nil {
+		logger.Debug("POST /repos/%v/%v/statuses/%s returned: %v", repo.Owner, repo.Name, pull.HeadCommit, resp.StatusCode)
+		return err
+	}
+
+	logger.Debug("Gitea status for pull request updated: %v (%v)", state, pull.Num)
+
+	return nil
+}
+
+// DiscardReviews discards / dismisses all pull request reviews
+func (c *GiteaClient) DiscardReviews(repo models.Repo, pull models.PullRequest) error {
+	page := 0
+	nextPage := 1
+
+	dismissOptions := gitea.DismissPullReviewOptions{
+		Message: "Dismissed by Atlantis",
+	}
+
+	listOptions := gitea.ListPullReviewsOptions{
+		ListOptions: gitea.ListOptions{
+			Page:     1,
+			PageSize: c.pageSize,
+		},
+	}
+
+	for page < nextPage {
+		page++
+		listOptions.ListOptions.Page = page
+		pullReviews, resp, err := c.giteaClient.ListPullReviews(repo.Owner, repo.Name, int64(pull.Num), listOptions)
+
+		if err != nil {
+			return err
+		}
+
+		for _, review := range pullReviews {
+			_, err := c.giteaClient.DismissPullReview(repo.Owner, repo.Name, int64(pull.Num), review.ID, dismissOptions)
+
+			if err != nil {
+				return err
+			}
+		}
+
+		nextPage = resp.NextPage
+
+		// Emergency break after giteaPaginationEBreak pages
+		if page >= giteaPaginationEBreak {
+			break
+		}
+	}
+
+	return nil
+}
+
+func (c *GiteaClient) MergePull(logger logging.SimpleLogging, pull models.PullRequest, pullOptions models.PullRequestOptions) error {
+	logger.Debug("Merging Gitea pull request %d", pull.Num)
+
+	mergeOptions := gitea.MergePullRequestOption{
+		Style:                  gitea.MergeStyleMerge,
+		Title:                  "Atlantis merge",
+		Message:                "Automatic merge by Atlantis",
+		DeleteBranchAfterMerge: pullOptions.DeleteSourceBranchOnMerge,
+		ForceMerge:             false,
+		HeadCommitId:           pull.HeadCommit,
+		MergeWhenChecksSucceed: false,
+	}
+
+	succeeded, resp, err := c.giteaClient.MergePullRequest(pull.BaseRepo.Owner, pull.BaseRepo.Name, int64(pull.Num), mergeOptions)
+
+	if err != nil {
+		logger.Debug("POST /repos/%v/%v/pulls/%d/merge returned: %v", pull.BaseRepo.Owner, pull.BaseRepo.Name, pull.Num, resp.StatusCode)
+		return err
+	}
+
+	if !succeeded {
+		return fmt.Errorf("merge failed: %s", resp.Status)
+	}
+
+	return nil
+}
+
+// MarkdownPullLink specifies the string used in a pull request comment to reference another pull request.
+func (c *GiteaClient) MarkdownPullLink(pull models.PullRequest) (string, error) {
+	return fmt.Sprintf("#%d", pull.Num), nil
+}
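Given the merge options above, a hypothetical automerge call through the new client would look like the following; `DeleteSourceBranchOnMerge` is mapped onto Gitea's `DeleteBranchAfterMerge` (all values illustrative, test skips itself):

```go
package gitea_test

import (
	"testing"

	"github.com/runatlantis/atlantis/server/events/models"
	"github.com/runatlantis/atlantis/server/events/vcs/gitea"
	"github.com/runatlantis/atlantis/server/logging"
)

// Illustrative merge sketch; endpoint, token, and PR values are fake.
func TestGiteaMergeSketch(t *testing.T) {
	t.Skip("illustrative sketch, not a real test")
	logger := logging.NewNoopLogger(t)

	client, err := gitea.NewClient("https://gitea.example.com", "atlantis", "<token>", 30, logger)
	if err != nil {
		t.Fatal(err)
	}

	pull := models.PullRequest{
		Num:      1,
		BaseRepo: models.Repo{Owner: "owner", Name: "repo", FullName: "owner/repo"},
	}

	// DeleteSourceBranchOnMerge becomes Gitea's DeleteBranchAfterMerge.
	if err := client.MergePull(logger, pull, models.PullRequestOptions{
		DeleteSourceBranchOnMerge: true,
	}); err != nil {
		t.Fatalf("merge failed: %s", err)
	}
}
```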
+// GetTeamNamesForUser returns the names of the teams or groups that the user belongs to (in the organization the repository belongs to).
+func (c *GiteaClient) GetTeamNamesForUser(repo models.Repo, user models.User) ([]string, error) {
+	// TODO: implement
+	return nil, errors.New("GetTeamNamesForUser not (yet) implemented for Gitea client")
+}
+
+// GetFileContent fetches the content of a single repository file from the VCS
+// (for VCS hosts that support fetching a single file). The first return value
+// indicates whether the repo contains the file; if it does, the file's content
+// is placed in the second return value.
+func (c *GiteaClient) GetFileContent(logger logging.SimpleLogging, pull models.PullRequest, fileName string) (bool, []byte, error) {
+	logger.Debug("Getting file content for %s in Gitea pull request %d", fileName, pull.Num)
+
+	content, resp, err := c.giteaClient.GetContents(pull.BaseRepo.Owner, pull.BaseRepo.Name, pull.HeadCommit, fileName)
+
+	if err != nil {
+		logger.Debug("GET /repos/%v/%v/contents/%s?ref=%v returned: %v", pull.BaseRepo.Owner, pull.BaseRepo.Name, fileName, pull.HeadCommit, resp.StatusCode)
+		return false, nil, err
+	}
+
+	if content.Type == "file" {
+		decodedData, err := base64.StdEncoding.DecodeString(*content.Content)
+		if err != nil {
+			return true, []byte{}, err
+		}
+		return true, decodedData, nil
+	}
+
+	return false, nil, nil
+}
+
+// SupportsSingleFileDownload returns true if the VCS supports downloading a single file
+func (c *GiteaClient) SupportsSingleFileDownload(repo models.Repo) bool {
+	return true
+}
+
+// GetCloneURL returns the clone URL of the repo
+func (c *GiteaClient) GetCloneURL(logger logging.SimpleLogging, _ models.VCSHostType, repo string) (string, error) {
+	logger.Debug("Getting clone URL for %s", repo)
+
+	parts := strings.Split(repo, "/")
+	if len(parts) < 2 {
+		return "", errors.New("invalid repo format, expected 'owner/repo'")
+	}
+	repository, _, err := c.giteaClient.GetRepo(parts[0], parts[1])
+	if err != nil {
+		logger.Debug("GET /repos/%v/%v returned an error: %v", parts[0], parts[1], err)
+		return "", err
+	}
+	return repository.CloneURL, nil
+}
+
+// GetPullLabels returns the labels of a pull request
+func (c *GiteaClient) GetPullLabels(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) {
+	logger.Debug("Getting labels for Gitea pull request %d", pull.Num)
+
+	page := 0
+	nextPage := 1
+	results := make([]string, 0)
+
+	opts := gitea.ListLabelsOptions{
+		ListOptions: gitea.ListOptions{
+			Page:     0,
+			PageSize: c.pageSize,
+		},
+	}
+
+	for page < nextPage {
+		page++
+		opts.ListOptions.Page = page
+
+		labels, resp, err := c.giteaClient.GetIssueLabels(repo.Owner, repo.Name, int64(pull.Num), opts)
+
+		if err != nil {
+			logger.Debug("GET /repos/%v/%v/issues/%d/labels?%v returned: %v", repo.Owner, repo.Name, pull.Num, "unknown", resp.StatusCode)
+			return nil, err
+		}
+
+		for _, label := range labels {
+			results = append(results, label.Name)
+		}
+
+		nextPage = resp.NextPage
+
+		// Emergency break after giteaPaginationEBreak pages
+		if page >= giteaPaginationEBreak {
+			break
+		}
+	}
+
+	return results, nil
+}
+
+func ValidateSignature(payload []byte, signature string, secretKey []byte) error {
+	isValid, err := gitea.VerifyWebhookSignature(string(secretKey), signature, payload)
+	if err != nil {
+		return errors.New("signature verification internal error")
+	}
+	if !isValid {
+		return errors.New("invalid signature")
+	}
+
+	return nil
+}
diff --git a/server/events/vcs/gitea/models.go b/server/events/vcs/gitea/models.go
new file mode 100644
index 0000000000..e624578e24
--- /dev/null
+++ b/server/events/vcs/gitea/models.go
@@ -0,0 +1,30
diff --git a/server/events/vcs/gitea/models.go b/server/events/vcs/gitea/models.go new file mode 100644 index 0000000000..e624578e24 --- /dev/null +++ b/server/events/vcs/gitea/models.go @@ -0,0 +1,30 @@ +// Copyright 2024 Florian Beisel +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import "code.gitea.io/sdk/gitea" + +type GiteaWebhookPayload struct { + Action string `json:"action"` + Number int `json:"number"` + PullRequest gitea.PullRequest `json:"pull_request"` +} + +type GiteaIssueCommentPayload struct { + Action string `json:"action"` + Comment gitea.Comment `json:"comment"` + Repository gitea.Repository `json:"repository"` + Issue gitea.Issue `json:"issue"` +} diff --git a/server/events/vcs/github_client.go b/server/events/vcs/github_client.go index 12c5500cfa..d9c3d90541 100644 --- a/server/events/vcs/github_client.go +++ b/server/events/vcs/github_client.go @@ -17,11 +17,15 @@ import ( "context" "encoding/base64" "fmt" + "maps" "net/http" + "slices" + "sort" + "strconv" "strings" "time" - "github.com/google/go-github/v58/github" + "github.com/google/go-github/v65/github" "github.com/pkg/errors" "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" @@ -39,14 +43,49 @@ var ( pullRequestDismissalMessage = *githubv4.NewString("Dismissing reviews because of plan changes") ) +type GithubRepoIdCacheEntry struct { + RepoId githubv4.Int + LookupTime time.Time +} + +type GitHubRepoIdCache struct { + cache map[githubv4.String]GithubRepoIdCacheEntry +} + +func NewGitHubRepoIdCache() GitHubRepoIdCache { + return GitHubRepoIdCache{ + cache: make(map[githubv4.String]GithubRepoIdCacheEntry), + } +} + +func (c *GitHubRepoIdCache) Get(key githubv4.String) (githubv4.Int, bool) { + entry, ok := c.cache[key] + if !ok { + return githubv4.Int(0), false + } + if time.Since(entry.LookupTime) > time.Hour { + delete(c.cache, key) + return githubv4.Int(0), false + } + return entry.RepoId, true +} + +func (c *GitHubRepoIdCache) Set(key githubv4.String, value githubv4.Int) { + c.cache[key] = GithubRepoIdCacheEntry{ + RepoId: value, + LookupTime: time.Now(), + } +} + // GithubClient is used to perform GitHub actions. type GithubClient struct { - user string - client *github.Client - v4Client *githubv4.Client - ctx context.Context - logger logging.SimpleLogging - config GithubConfig + user string + client *github.Client + v4Client *githubv4.Client + ctx context.Context + config GithubConfig + maxCommentsPerCommand int + repoIdCache GitHubRepoIdCache } // GithubAppTemporarySecrets holds app credentials obtained from github after creation. @@ -77,7 +116,9 @@ type GithubPRReviewSummary struct { }
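The GitHubRepoIdCache introduced above is a plain map with a one-hour TTL enforced at read time: Get evicts and misses on stale entries, so the next lookup repopulates them. A small usage sketch (the repo name and id are illustrative); note the map is not guarded by a mutex, so this assumes callers serialize access:

func repoIdCacheSketch() {
	cache := NewGitHubRepoIdCache()
	cache.Set(githubv4.String("runatlantis/atlantis"), githubv4.Int(42))

	if id, ok := cache.Get(githubv4.String("runatlantis/atlantis")); ok {
		fmt.Println(id) // prints 42 while the entry is younger than time.Hour
	}
	// Once the entry is older than an hour, Get deletes it and returns
	// (0, false), forcing the caller back to the GraphQL lookup.
}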
// NewGithubClient returns a valid GitHub client. -func NewGithubClient(hostname string, credentials GithubCredentials, config GithubConfig, logger logging.SimpleLogging) (*GithubClient, error) { + +func NewGithubClient(hostname string, credentials GithubCredentials, config GithubConfig, maxCommentsPerCommand int, logger logging.SimpleLogging) (*GithubClient, error) { + logger.Debug("Creating new GitHub client for host: %s", hostname) transport, err := credentials.Client() if err != nil { return nil, errors.Wrap(err, "error initializing github authentication transport") } @@ -90,7 +131,8 @@ func NewGithubClient(hostname string, credentials GithubCredentials, config Gith graphqlURL = "https://api.github.com/graphql" } else { apiURL := resolveGithubAPIURL(hostname) - client, err = github.NewEnterpriseClient(apiURL.String(), apiURL.String(), transport) + // TODO: Deprecated: Use NewClient(httpClient).WithEnterpriseURLs(baseURL, uploadURL) instead + client, err = github.NewEnterpriseClient(apiURL.String(), apiURL.String(), transport) //nolint:staticcheck if err != nil { return nil, err } @@ -106,19 +148,22 @@ func NewGithubClient(hostname string, credentials GithubCredentials, config Gith if err != nil { return nil, errors.Wrap(err, "getting user") } + return &GithubClient{ - user: user, - client: client, - v4Client: v4Client, - ctx: context.Background(), - logger: logger, - config: config, + user: user, + client: client, + v4Client: v4Client, + ctx: context.Background(), + config: config, + maxCommentsPerCommand: maxCommentsPerCommand, + repoIdCache: NewGitHubRepoIdCache(), }, nil } // GetModifiedFiles returns the names of files that were modified in the pull request // relative to the repo root, e.g. parent/child/file.txt. -func (g *GithubClient) GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) { +func (g *GithubClient) GetModifiedFiles(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) { + logger.Debug("Getting modified files for GitHub pull request %d", pull.Num) var files []string nextPage := 0 @@ -142,7 +187,7 @@ listloop: pageFiles, resp, err := g.client.PullRequests.ListFiles(g.ctx, repo.Owner, repo.Name, pull.Num, &opts) if resp != nil { - g.logger.Debug("[attempt %d] GET /repos/%v/%v/pulls/%d/files returned: %v", i+1, repo.Owner, repo.Name, pull.Num, resp.StatusCode) + logger.Debug("[attempt %d] GET /repos/%v/%v/pulls/%d/files returned: %v", i+1, repo.Owner, repo.Name, pull.Num, resp.StatusCode) } if err != nil { ghErr, ok := err.(*github.ErrorResponse) @@ -175,7 +220,8 @@ listloop: // CreateComment creates a comment on the pull request. // If comment length is greater than the max comment length we split into // multiple comments. -func (g *GithubClient) CreateComment(repo models.Repo, pullNum int, comment string, command string) error { +func (g *GithubClient) CreateComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, comment string, command string) error { + logger.Debug("Creating comment on GitHub pull request %d", pullNum) var sepStart string sepEnd := "\n```\n</details>" +
" + @@ -189,11 +235,15 @@ func (g *GithubClient) CreateComment(repo models.Repo, pullNum int, comment stri "```diff\n" } - comments := common.SplitComment(comment, maxCommentLength, sepEnd, sepStart) + truncationHeader := "> [!WARNING]\n" + + "> **Warning**: Command output is larger than the maximum number of comments per command. Output truncated.\n
Show Output\n\n" + + "```diff\n" + + comments := common.SplitComment(comment, maxCommentLength, sepEnd, sepStart, g.maxCommentsPerCommand, truncationHeader) for i := range comments { _, resp, err := g.client.Issues.CreateComment(g.ctx, repo.Owner, repo.Name, pullNum, &github.IssueComment{Body: &comments[i]}) if resp != nil { - g.logger.Debug("POST /repos/%v/%v/issues/%d/comments returned: %v", repo.Owner, repo.Name, pullNum, resp.StatusCode) + logger.Debug("POST /repos/%v/%v/issues/%d/comments returned: %v", repo.Owner, repo.Name, pullNum, resp.StatusCode) } if err != nil { return err @@ -203,15 +253,17 @@ func (g *GithubClient) CreateComment(repo models.Repo, pullNum int, comment stri } // ReactToComment adds a reaction to a comment. -func (g *GithubClient) ReactToComment(repo models.Repo, _ int, commentID int64, reaction string) error { +func (g *GithubClient) ReactToComment(logger logging.SimpleLogging, repo models.Repo, _ int, commentID int64, reaction string) error { + logger.Debug("Adding reaction to GitHub pull request comment %d", commentID) _, resp, err := g.client.Reactions.CreateIssueCommentReaction(g.ctx, repo.Owner, repo.Name, commentID, reaction) if resp != nil { - g.logger.Debug("POST /repos/%v/%v/issues/comments/%d/reactions returned: %v", repo.Owner, repo.Name, commentID, resp.StatusCode) + logger.Debug("POST /repos/%v/%v/issues/comments/%d/reactions returned: %v", repo.Owner, repo.Name, commentID, resp.StatusCode) } return err } -func (g *GithubClient) HidePrevCommandComments(repo models.Repo, pullNum int, command string, dir string) error { +func (g *GithubClient) HidePrevCommandComments(logger logging.SimpleLogging, repo models.Repo, pullNum int, command string, dir string) error { + logger.Debug("Hiding previous command comments on GitHub pull request %d", pullNum) var allComments []*github.IssueComment nextPage := 0 for { @@ -221,7 +273,7 @@ func (g *GithubClient) HidePrevCommandComments(repo models.Repo, pullNum int, co ListOptions: github.ListOptions{Page: nextPage}, }) if resp != nil { - g.logger.Debug("GET /repos/%v/%v/issues/%d/comments returned: %v", repo.Owner, repo.Name, pullNum, resp.StatusCode) + logger.Debug("GET /repos/%v/%v/issues/%d/comments returned: %v", repo.Owner, repo.Name, pullNum, resp.StatusCode) } if err != nil { return errors.Wrap(err, "listing comments") @@ -271,7 +323,7 @@ func (g *GithubClient) HidePrevCommandComments(repo models.Repo, pullNum int, co Classifier: githubv4.ReportedContentClassifiersOutdated, SubjectID: comment.GetNodeID(), } - g.logger.Debug("Hiding comment %s", comment.GetNodeID()) + logger.Debug("Hiding comment %s", comment.GetNodeID()) if err := g.v4Client.Mutate(g.ctx, &m, input, nil); err != nil { return errors.Wrapf(err, "minimize comment %s", comment.GetNodeID()) } @@ -333,7 +385,8 @@ func (g *GithubClient) getPRReviews(repo models.Repo, pull models.PullRequest) ( } // PullIsApproved returns true if the pull request was approved. 
@@ -333,7 +385,8 @@ func (g *GithubClient) getPRReviews(repo models.Repo, pull models.PullRequest) ( } // PullIsApproved returns true if the pull request was approved. -func (g *GithubClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (approvalStatus models.ApprovalStatus, err error) { +func (g *GithubClient) PullIsApproved(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) (approvalStatus models.ApprovalStatus, err error) { + logger.Debug("Checking if GitHub pull request %d is approved", pull.Num) nextPage := 0 for { opts := github.ListOptions{ @@ -344,7 +397,7 @@ func (g *GithubClient) PullIsApproved(repo models.Repo, pull models.PullRequest) } pageReviews, resp, err := g.client.PullRequests.ListReviews(g.ctx, repo.Owner, repo.Name, pull.Num, &opts) if resp != nil { - g.logger.Debug("GET /repos/%v/%v/pulls/%d/reviews returned: %v", repo.Owner, repo.Name, pull.Num, resp.StatusCode) + logger.Debug("GET /repos/%v/%v/pulls/%d/reviews returned: %v", repo.Owner, repo.Name, pull.Num, resp.StatusCode) } if err != nil { return approvalStatus, errors.Wrap(err, "getting reviews") @@ -399,126 +452,363 @@ func (g *GithubClient) DiscardReviews(repo models.Repo, pull models.PullRequest) return nil } -// isRequiredCheck is a helper function to determine if a check is required or not -func isRequiredCheck(check string, required []string) bool { - //in go1.18 can prob replace this with slices.Contains - for _, r := range required { - if r == check { - return true - } - } +type PageInfo struct { + EndCursor *githubv4.String + HasNextPage githubv4.Boolean +} - return false +type WorkflowFileReference struct { + Path githubv4.String + RepositoryId githubv4.Int + Sha *githubv4.String } -// GetCombinedStatusMinusApply checks Statuses for PR, excluding atlantis apply. Returns true if all other statuses are not in failure. -func (g *GithubClient) GetCombinedStatusMinusApply(repo models.Repo, pull *github.PullRequest, vcstatusname string) (bool, error) { - //check combined status api - status, resp, err := g.client.Repositories.GetCombinedStatus(g.ctx, *pull.Head.Repo.Owner.Login, repo.Name, *pull.Head.Ref, nil) - if resp != nil { - g.logger.Debug("GET /repos/%v/%v/commits/%s/status returned: %v", *pull.Head.Repo.Owner.Login, repo.Name, *pull.Head.Ref, resp.StatusCode) +func (original WorkflowFileReference) Copy() WorkflowFileReference { + copy := WorkflowFileReference{ + Path: original.Path, + RepositoryId: original.RepositoryId, + Sha: new(githubv4.String), } - if err != nil { - return false, errors.Wrap(err, "getting combined status") + if original.Sha != nil { + *copy.Sha = *original.Sha } + return copy +} - //iterate over statuses - return false if we find one that isn't "apply" and doesn't have state = "success" - for _, r := range status.Statuses { - if strings.HasPrefix(*r.Context, fmt.Sprintf("%s/%s", vcstatusname, command.Apply.String())) { - continue - } - if *r.State != "success" { - return false, nil - } +type WorkflowRun struct { + File struct { + Path githubv4.String + RepositoryFileUrl githubv4.String + RepositoryName githubv4.String } +} - //get required status checks - required, resp, err := g.client.Repositories.GetBranchProtection(context.Background(), repo.Owner, repo.Name, *pull.Base.Ref) - if resp != nil { - g.logger.Debug("GET /repos/%v/%v/branches/%s/protection returned: %v", repo.Owner, repo.Name, *pull.Base.Ref, resp.StatusCode) +type CheckRun struct { + Name githubv4.String + Conclusion githubv4.String + // Not currently used: GitHub API classifies as required if coming from ruleset, even when the ruleset is not enforced!
+ IsRequired githubv4.Boolean `graphql:"isRequired(pullRequestNumber: $number)"` + CheckSuite struct { + WorkflowRun *WorkflowRun } - if err != nil { - return false, errors.Wrap(err, "getting required status checks") +} + +func (original CheckRun) Copy() CheckRun { + copy := CheckRun{ + Name: original.Name, + Conclusion: original.Conclusion, + IsRequired: original.IsRequired, + CheckSuite: original.CheckSuite, + } + if original.CheckSuite.WorkflowRun != nil { + copy.CheckSuite.WorkflowRun = new(WorkflowRun) + *copy.CheckSuite.WorkflowRun = *original.CheckSuite.WorkflowRun } + return copy +} - if required.RequiredStatusChecks == nil { - return true, nil +type StatusContext struct { + Context githubv4.String + State githubv4.String + // Not currently used: GitHub API classifies as required if coming from ruleset, even when the ruleset is not enforced! + IsRequired githubv4.Boolean `graphql:"isRequired(pullRequestNumber: $number)"` +} + +func (g *GithubClient) LookupRepoId(repo githubv4.String) (githubv4.Int, error) { + // This function may get many calls for the same repo, and repo names are not often changed + // Utilize caching to reduce the number of API calls to GitHub + if repoId, ok := g.repoIdCache.Get(repo); ok { + return repoId, nil } - //check check suite/check run api - checksuites, resp, err := g.client.Checks.ListCheckSuitesForRef(context.Background(), *pull.Head.Repo.Owner.Login, repo.Name, *pull.Head.Ref, nil) - if resp != nil { - g.logger.Debug("GET /repos/%v/%v/commits/%s/check-suites returned: %v", *pull.Head.Repo.Owner.Login, repo.Name, *pull.Head.Ref, resp.StatusCode) + repoSplit := strings.Split(string(repo), "/") + if len(repoSplit) != 2 { + return githubv4.Int(0), fmt.Errorf("invalid repository name: %s", repo) } + + var query struct { + Repository struct { + DatabaseId githubv4.Int + } `graphql:"repository(owner: $owner, name: $name)"` + } + variables := map[string]interface{}{ + "owner": githubv4.String(repoSplit[0]), + "name": githubv4.String(repoSplit[1]), + } + + err := g.v4Client.Query(g.ctx, &query, variables) + if err != nil { - return false, errors.Wrap(err, "getting check suites for ref") + return githubv4.Int(0), errors.Wrap(err, "getting repository id from GraphQL") } - //iterate over check completed check suites - return false if we find one that doesnt have conclusion = "success" - for _, c := range checksuites.CheckSuites { - if *c.Status == "completed" { - //iterate over the runs inside the suite - suite, resp, err := g.client.Checks.ListCheckRunsCheckSuite(context.Background(), *pull.Head.Repo.Owner.Login, repo.Name, *c.ID, nil) - if resp != nil { - g.logger.Debug("GET /repos/%v/%v/check-suites/%d/check-runs returned: %v", *pull.Head.Repo.Owner.Login, repo.Name, *c.ID, resp.StatusCode) - } - if err != nil { - return false, errors.Wrap(err, "getting check runs for check suite") - } + g.repoIdCache.Set(repo, query.Repository.DatabaseId) - for _, r := range suite.CheckRuns { - //check to see if the check is required - if isRequiredCheck(*r.Name, required.RequiredStatusChecks.Contexts) { - if *c.Conclusion == "success" { - continue - } - return false, nil - } - //ignore checks that arent required - continue - } - } + return query.Repository.DatabaseId, nil +} + +func (g *GithubClient) WorkflowRunMatchesWorkflowFileReference(workflowRun WorkflowRun, workflowFileReference WorkflowFileReference) (bool, error) { + // Unfortunately, the GitHub API doesn't expose the repositoryId for the WorkflowRunFile from the statusCheckRollup. 
+ // Conversely, it doesn't expose the repository name for the WorkflowFileReference from the RepositoryRuleConnection. + // Therefore, a second query is required to lookup the association between repositoryId and repositoryName. + repoId, err := g.LookupRepoId(workflowRun.File.RepositoryName) + if err != nil { + return false, err } - return true, nil + if !(repoId == workflowFileReference.RepositoryId && workflowRun.File.Path == workflowFileReference.Path) { + return false, nil + } else if workflowFileReference.Sha != nil { + return strings.Contains(string(workflowRun.File.RepositoryFileUrl), string(*workflowFileReference.Sha)), nil + } else { + return true, nil + } } -// GetPullReviewDecision gets the pull review decision, which takes into account CODEOWNERS -func (g *GithubClient) GetPullReviewDecision(repo models.Repo, pull models.PullRequest) (approvalStatus bool, err error) { +func (g *GithubClient) GetPullRequestMergeabilityInfo( + repo models.Repo, + pull *github.PullRequest, +) ( + reviewDecision githubv4.String, + requiredChecks []githubv4.String, + requiredWorkflows []WorkflowFileReference, + checkRuns []CheckRun, + statusContexts []StatusContext, + err error, +) { var query struct { Repository struct { PullRequest struct { - ReviewDecision string + ReviewDecision githubv4.String + BaseRef struct { + BranchProtectionRule struct { + RequiredStatusChecks []struct { + Context githubv4.String + } + } + Rules struct { + PageInfo PageInfo + Nodes []struct { + Type githubv4.String + RepositoryRuleset struct { + Enforcement githubv4.String + } + Parameters struct { + RequiredStatusChecksParameters struct { + RequiredStatusChecks []struct { + Context githubv4.String + } + } `graphql:"... on RequiredStatusChecksParameters"` + WorkflowsParameters struct { + Workflows []WorkflowFileReference + } `graphql:"... on WorkflowsParameters"` + } + } + } `graphql:"rules(first: 100, after: $ruleCursor)"` + } + Commits struct { + Nodes []struct { + Commit struct { + StatusCheckRollup struct { + Contexts struct { + PageInfo PageInfo + Nodes []struct { + Typename githubv4.String `graphql:"__typename"` + CheckRun CheckRun `graphql:"... on CheckRun"` + StatusContext StatusContext `graphql:"... 
on StatusContext"` + } + } `graphql:"contexts(first: 100, after: $contextCursor)"` + } + } + } + } `graphql:"commits(last: 1)"` } `graphql:"pullRequest(number: $number)"` } `graphql:"repository(owner: $owner, name: $name)"` } variables := map[string]interface{}{ - "owner": githubv4.String(repo.Owner), - "name": githubv4.String(repo.Name), - "number": githubv4.Int(pull.Num), + "owner": githubv4.String(repo.Owner), + "name": githubv4.String(repo.Name), + "number": githubv4.Int(*pull.Number), + "ruleCursor": (*githubv4.String)(nil), + "contextCursor": (*githubv4.String)(nil), + } + + requiredChecksSet := make(map[githubv4.String]any) + +pagination: + for { + err = g.v4Client.Query(g.ctx, &query, variables) + + if err != nil { + break pagination + } + + reviewDecision = query.Repository.PullRequest.ReviewDecision + + for _, rule := range query.Repository.PullRequest.BaseRef.BranchProtectionRule.RequiredStatusChecks { + requiredChecksSet[rule.Context] = struct{}{} + } + + for _, rule := range query.Repository.PullRequest.BaseRef.Rules.Nodes { + if rule.RepositoryRuleset.Enforcement != "ACTIVE" { + continue + } + switch rule.Type { + case "REQUIRED_STATUS_CHECKS": + for _, context := range rule.Parameters.RequiredStatusChecksParameters.RequiredStatusChecks { + requiredChecksSet[context.Context] = struct{}{} + } + case "WORKFLOWS": + for _, workflow := range rule.Parameters.WorkflowsParameters.Workflows { + requiredWorkflows = append(requiredWorkflows, workflow.Copy()) + } + default: + continue + } + } + + if len(query.Repository.PullRequest.Commits.Nodes) == 0 { + err = errors.New("no commits found on PR") + break pagination + } + + for _, context := range query.Repository.PullRequest.Commits.Nodes[0].Commit.StatusCheckRollup.Contexts.Nodes { + switch context.Typename { + case "CheckRun": + checkRuns = append(checkRuns, context.CheckRun.Copy()) + case "StatusContext": + statusContexts = append(statusContexts, context.StatusContext) + default: + err = fmt.Errorf("unknown type of status check, %q", context.Typename) + break pagination + } + } + + if !query.Repository.PullRequest.BaseRef.Rules.PageInfo.HasNextPage && + !query.Repository.PullRequest.Commits.Nodes[0].Commit.StatusCheckRollup.Contexts.PageInfo.HasNextPage { + break pagination + } + + if query.Repository.PullRequest.BaseRef.Rules.PageInfo.EndCursor != nil { + variables["ruleCursor"] = query.Repository.PullRequest.BaseRef.Rules.PageInfo.EndCursor + } + if query.Repository.PullRequest.Commits.Nodes[0].Commit.StatusCheckRollup.Contexts.PageInfo.EndCursor != nil { + variables["contextCursor"] = query.Repository.PullRequest.Commits.Nodes[0].Commit.StatusCheckRollup.Contexts.PageInfo.EndCursor + } } - err = g.v4Client.Query(g.ctx, &query, variables) if err != nil { - return approvalStatus, errors.Wrap(err, "getting reviewDecision") + return "", nil, nil, nil, nil, errors.Wrap(err, "fetching rulesets, branch protections and status checks from GraphQL") } - if query.Repository.PullRequest.ReviewDecision == "APPROVED" || len(query.Repository.PullRequest.ReviewDecision) == 0 { - return true, nil + for context := range requiredChecksSet { + requiredChecks = append(requiredChecks, context) + } + + return reviewDecision, requiredChecks, requiredWorkflows, checkRuns, statusContexts, nil +} + +func CheckRunPassed(checkRun CheckRun) bool { + return checkRun.Conclusion == "SUCCESS" || checkRun.Conclusion == "SKIPPED" || checkRun.Conclusion == "NEUTRAL" +} + +func StatusContextPassed(statusContext StatusContext, vcsstatusname string) bool { + return 
statusContext.State == "SUCCESS" +} + +func ExpectedCheckPassed(expectedContext githubv4.String, checkRuns []CheckRun, statusContexts []StatusContext, vcsstatusname string) bool { + for _, checkRun := range checkRuns { + if checkRun.Name == expectedContext { + return CheckRunPassed(checkRun) + } + } + + for _, statusContext := range statusContexts { + if statusContext.Context == expectedContext { + return StatusContextPassed(statusContext, vcsstatusname) + } + } + + return false +} + +func (g *GithubClient) ExpectedWorkflowPassed(expectedWorkflow WorkflowFileReference, checkRuns []CheckRun) (bool, error) { + for _, checkRun := range checkRuns { + if checkRun.CheckSuite.WorkflowRun == nil { + continue + } + match, err := g.WorkflowRunMatchesWorkflowFileReference(*checkRun.CheckSuite.WorkflowRun, expectedWorkflow) + if err != nil { + return false, err + } + if match { + return CheckRunPassed(checkRun), nil + } } return false, nil } +// IsMergeableMinusApply checks review decision (which takes into account CODEOWNERS) and required checks for PR (excluding the atlantis apply check). +func (g *GithubClient) IsMergeableMinusApply(logger logging.SimpleLogging, repo models.Repo, pull *github.PullRequest, vcsstatusname string, ignoreVCSStatusNames []string) (bool, error) { + if pull.Number == nil { + return false, errors.New("pull request number is nil") + } + reviewDecision, requiredChecks, requiredWorkflows, checkRuns, statusContexts, err := g.GetPullRequestMergeabilityInfo(repo, pull) + if err != nil { + return false, err + } + + notMergeablePrefix := fmt.Sprintf("Pull Request %s/%s:%s is not mergeable", repo.Owner, repo.Name, strconv.Itoa(*pull.Number)) + + // Review decision takes CODEOWNERS into account + // Empty review decision means review is not required + if reviewDecision != "APPROVED" && len(reviewDecision) != 0 { + logger.Debug("%s: Review Decision: %s", notMergeablePrefix, reviewDecision) + return false, nil + } + + // The statusCheckRollup does not always contain all required checks + // For example, if a check was made required after the pull request was opened, it would be missing + // Go through all checks and workflows required by branch protection or rulesets + // Make sure that they can all be found in the statusCheckRollup and that they all pass + for _, requiredCheck := range requiredChecks { + if strings.HasPrefix(string(requiredCheck), fmt.Sprintf("%s/%s", vcsstatusname, command.Apply.String())) { + // Ignore atlantis apply check(s) + continue + } + if !slices.Contains(ignoreVCSStatusNames, GetVCSStatusNameFromRequiredCheck(requiredCheck)) && !ExpectedCheckPassed(requiredCheck, checkRuns, statusContexts, vcsstatusname) { + logger.Debug("%s: Expected Required Check: %s VCS Status Name: %s Ignore VCS Status Names: %s", notMergeablePrefix, requiredCheck, vcsstatusname, ignoreVCSStatusNames) + return false, nil + } + } + for _, requiredWorkflow := range requiredWorkflows { + passed, err := g.ExpectedWorkflowPassed(requiredWorkflow, checkRuns) + if err != nil { + return false, err + } + if !passed { + logger.Debug("%s: Expected Required Workflow: RepositoryId: %d Path: %s", notMergeablePrefix, requiredWorkflow.RepositoryId, requiredWorkflow.Path) + return false, nil + } + } + + return true, nil +} + +func GetVCSStatusNameFromRequiredCheck(requiredCheck githubv4.String) string { + return strings.Split(string(requiredCheck), "/")[0] +} + // PullIsMergeable returns true if the pull request is mergeable. 
// PullIsMergeable returns true if the pull request is mergeable. -func (g *GithubClient) PullIsMergeable(repo models.Repo, pull models.PullRequest, vcsstatusname string) (bool, error) { - githubPR, err := g.GetPullRequest(repo, pull.Num) +func (g *GithubClient) PullIsMergeable(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, vcsstatusname string, ignoreVCSStatusNames []string) (bool, error) { + logger.Debug("Checking if GitHub pull request %d is mergeable", pull.Num) + githubPR, err := g.GetPullRequest(logger, repo, pull.Num) if err != nil { return false, errors.Wrap(err, "getting pull request") } - state := githubPR.GetMergeableState() // We map our mergeable check to when the GitHub merge button is clickable. // This corresponds to the following states: // clean: No conflicts, all requirements satisfied. @@ -528,37 +818,27 @@ func (g *GithubClient) PullIsMergeable(repo models.Repo, pull models.PullRequest // has_hooks: GitHub Enterprise only, if a repo has custom pre-receive // hooks. Merging is allowed (green box). // See: https://github.com/octokit/octokit.net/issues/1763 - if state != "clean" && state != "unstable" && state != "has_hooks" { - //mergeable bypass apply code hidden by feature flag + switch githubPR.GetMergeableState() { + case "clean", "unstable", "has_hooks": + return true, nil + case "blocked": if g.config.AllowMergeableBypassApply { - g.logger.Debug("AllowMergeableBypassApply feature flag is enabled - attempting to bypass apply from mergeable requirements") - if state == "blocked" { - //check status excluding atlantis apply - status, err := g.GetCombinedStatusMinusApply(repo, githubPR, vcsstatusname) - if err != nil { - return false, errors.Wrap(err, "getting pull request status") - } - - //check to see if pr is approved using reviewDecision - approved, err := g.GetPullReviewDecision(repo, pull) - if err != nil { - return false, errors.Wrap(err, "getting pull request reviewDecision") - } - - //if all other status checks EXCEPT atlantis/apply are successful, and the PR is approved based on reviewDecision, let it proceed - if status && approved { - return true, nil - } + logger.Debug("AllowMergeableBypassApply feature flag is enabled - attempting to bypass apply from mergeable requirements") + isMergeableMinusApply, err := g.IsMergeableMinusApply(logger, repo, githubPR, vcsstatusname, ignoreVCSStatusNames) + if err != nil { + return false, errors.Wrap(err, "getting pull request status") } + return isMergeableMinusApply, nil } - + return false, nil + default: return false, nil } - return true, nil } // GetPullRequest returns the pull request. -func (g *GithubClient) GetPullRequest(repo models.Repo, num int) (*github.PullRequest, error) { +func (g *GithubClient) GetPullRequest(logger logging.SimpleLogging, repo models.Repo, num int) (*github.PullRequest, error) { + logger.Debug("Getting GitHub pull request %d", num) var err error var pull *github.PullRequest @@ -574,7 +854,7 @@ pull, resp, err := g.client.PullRequests.Get(g.ctx, repo.Owner, repo.Name, num) if resp != nil { - g.logger.Debug("GET /repos/%v/%v/pulls/%d returned: %v", repo.Owner, repo.Name, num, resp.StatusCode) + logger.Debug("GET /repos/%v/%v/pulls/%d returned: %v", repo.Owner, repo.Name, num, resp.StatusCode) } if err == nil { return pull, nil
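The rewritten PullIsMergeable above reduces GitHub's mergeable_state to a three-way decision: clean, unstable and has_hooks are mergeable outright; blocked is mergeable only when AllowMergeableBypassApply is enabled and IsMergeableMinusApply agrees; every other state (dirty, behind, unknown and so on) is not. A test-style sketch of the expected mapping with the bypass flag disabled (the state list mirrors the table-driven tests later in this diff):

func mergeableStateSketch() {
	expected := map[string]bool{
		"clean":     true,
		"unstable":  true,
		"has_hooks": true,
		"blocked":   false,
		"dirty":     false,
		"behind":    false,
		"unknown":   false,
	}
	for state, mergeable := range expected {
		fmt.Printf("mergeable_state=%q -> mergeable=%v\n", state, mergeable)
	}
}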
@@ -589,7 +869,7 @@ // UpdateStatus updates the status badge on the pull request. // See https://github.com/blog/1227-commit-status-api. -func (g *GithubClient) UpdateStatus(repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error { +func (g *GithubClient) UpdateStatus(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error { ghState := "error" switch state { case models.PendingCommitStatus: @@ -599,6 +879,7 @@ case models.FailedCommitStatus: ghState = "failure" } + logger.Debug("Updating status on GitHub pull request %d for '%s' to '%s'", pull.Num, description, ghState) status := &github.RepoStatus{ State: github.String(ghState), @@ -608,33 +889,59 @@ } _, resp, err := g.client.Repositories.CreateStatus(g.ctx, repo.Owner, repo.Name, pull.HeadCommit, status) if resp != nil { - g.logger.Debug("POST /repos/%v/%v/statuses/%s returned: %v", repo.Owner, repo.Name, pull.HeadCommit, resp.StatusCode) + logger.Debug("POST /repos/%v/%v/statuses/%s returned: %v", repo.Owner, repo.Name, pull.HeadCommit, resp.StatusCode) } return err } // MergePull merges the pull request. -func (g *GithubClient) MergePull(pull models.PullRequest, _ models.PullRequestOptions) error { +func (g *GithubClient) MergePull(logger logging.SimpleLogging, pull models.PullRequest, pullOptions models.PullRequestOptions) error { + logger.Debug("Merging GitHub pull request %d", pull.Num) // Users can set their repo to disallow certain types of merging. // We detect which types aren't allowed and use the type that is. repo, resp, err := g.client.Repositories.Get(g.ctx, pull.BaseRepo.Owner, pull.BaseRepo.Name) if resp != nil { - g.logger.Debug("GET /repos/%v/%v returned: %v", pull.BaseRepo.Owner, pull.BaseRepo.Name, resp.StatusCode) + logger.Debug("GET /repos/%v/%v returned: %v", pull.BaseRepo.Owner, pull.BaseRepo.Name, resp.StatusCode) } if err != nil { return errors.Wrap(err, "fetching repo info") } + const ( defaultMergeMethod = "merge" rebaseMergeMethod = "rebase" squashMergeMethod = "squash" ) - method := defaultMergeMethod - if !repo.GetAllowMergeCommit() { - if repo.GetAllowRebaseMerge() { - method = rebaseMergeMethod - } else if repo.GetAllowSquashMerge() { - method = squashMergeMethod + + mergeMethodsAllow := map[string]func() bool{ + defaultMergeMethod: repo.GetAllowMergeCommit, + rebaseMergeMethod: repo.GetAllowRebaseMerge, + squashMergeMethod: repo.GetAllowSquashMerge, + } + + mergeMethodsName := slices.Collect(maps.Keys(mergeMethodsAllow)) + sort.Strings(mergeMethodsName) + + var method string + if pullOptions.MergeMethod != "" { + method = pullOptions.MergeMethod + + isMethodAllowed, isMethodExist := mergeMethodsAllow[method] + if !isMethodExist { + return fmt.Errorf("Merge method '%s' is unknown.
Specify one of the valid values: '%s'", method, strings.Join(mergeMethodsName, ", ")) + } + + if !isMethodAllowed() { + return fmt.Errorf("Merge method '%s' is not allowed by the repository Pull Request settings", method) + } + } else { + method = defaultMergeMethod + if !repo.GetAllowMergeCommit() { + if repo.GetAllowRebaseMerge() { + method = rebaseMergeMethod + } else if repo.GetAllowSquashMerge() { + method = squashMergeMethod + } } } @@ -642,7 +949,7 @@ options := &github.PullRequestOptions{ MergeMethod: method, } - g.logger.Debug("PUT /repos/%v/%v/pulls/%d/merge", repo.Owner, repo.Name, pull.Num) + logger.Debug("PUT /repos/%v/%v/pulls/%d/merge", repo.Owner, repo.Name, pull.Num) mergeResult, resp, err := g.client.PullRequests.Merge( g.ctx, pull.BaseRepo.Owner, @@ -653,7 +960,7 @@ "", options) if resp != nil { - g.logger.Debug("POST /repos/%v/%v/pulls/%d/merge returned: %v", repo.Owner, repo.Name, pull.Num, resp.StatusCode) + logger.Debug("POST /repos/%v/%v/pulls/%d/merge returned: %v", repo.Owner, repo.Name, pull.Num, resp.StatusCode) } if err != nil { return errors.Wrap(err, "merging pull request") @@ -713,11 +1020,12 @@ func (g *GithubClient) GetTeamNamesForUser(repo models.Repo, user models.User) ( } // ExchangeCode returns a newly created app's info -func (g *GithubClient) ExchangeCode(code string) (*GithubAppTemporarySecrets, error) { +func (g *GithubClient) ExchangeCode(logger logging.SimpleLogging, code string) (*GithubAppTemporarySecrets, error) { + logger.Debug("Exchanging code for app secrets") ctx := context.Background() cfg, resp, err := g.client.Apps.CompleteAppManifest(ctx, code) if resp != nil { - g.logger.Debug("POST /app-manifests/%s/conversions returned: %v", code, resp.StatusCode) + logger.Debug("POST /app-manifests/%s/conversions returned: %v", code, resp.StatusCode) } data := &GithubAppTemporarySecrets{ ID: cfg.GetID(), @@ -733,11 +1041,12 @@ func (g *GithubClient) ExchangeCode(code string) (*GithubAppTemporarySecrets, er // GetFileContent fetches the content of a single file from the VCS (for hosts that support fetching a single file from a repository) // The first return value indicates whether the repo contains the file // if it does, the file's content is returned as the second value -func (g *GithubClient) GetFileContent(pull models.PullRequest, fileName string) (bool, []byte, error) { +func (g *GithubClient) GetFileContent(logger logging.SimpleLogging, pull models.PullRequest, fileName string) (bool, []byte, error) { + logger.Debug("Getting file content for %s in GitHub pull request %d", fileName, pull.Num) opt := github.RepositoryContentGetOptions{Ref: pull.HeadBranch} fileContent, _, resp, err := g.client.Repositories.GetContents(g.ctx, pull.BaseRepo.Owner, pull.BaseRepo.Name, fileName, &opt) if resp != nil { - g.logger.Debug("GET /repos/%v/%v/contents/%s returned: %v", pull.BaseRepo.Owner, pull.BaseRepo.Name, fileName, resp.StatusCode) + logger.Debug("GET /repos/%v/%v/contents/%s returned: %v", pull.BaseRepo.Owner, pull.BaseRepo.Name, fileName, resp.StatusCode) } if resp.StatusCode == http.StatusNotFound { @@ -759,11 +1068,12 @@ func (g *GithubClient) SupportsSingleFileDownload(_ models.Repo) bool { return true }
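MergePull above now gives an explicit pullOptions.MergeMethod precedence over auto-selection: an unknown method, or one the repository has disabled, is rejected with an error, and only when no method is requested does it fall back through merge, rebase and squash. A hypothetical call forcing a squash merge (client, logger and pull are assumed to be in scope):

err := client.MergePull(logger, pull, models.PullRequestOptions{
	DeleteSourceBranchOnMerge: false,
	MergeMethod:               "squash", // must be enabled in the repo settings
})
if err != nil {
	// e.g. "Merge method 'squash' is not allowed by the repository Pull Request settings"
	return err
}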
error) { + logger.Debug("Getting clone URL for %s", repo) parts := strings.Split(repo, "/") repository, resp, err := g.client.Repositories.Get(g.ctx, parts[0], parts[1]) if resp != nil { - g.logger.Debug("GET /repos/%v/%v returned: %v", parts[0], parts[1], resp.StatusCode) + logger.Debug("GET /repos/%v/%v returned: %v", parts[0], parts[1], resp.StatusCode) } if err != nil { return "", err @@ -771,10 +1081,11 @@ func (g *GithubClient) GetCloneURL(_ models.VCSHostType, repo string) (string, e return repository.GetCloneURL(), nil } -func (g *GithubClient) GetPullLabels(repo models.Repo, pull models.PullRequest) ([]string, error) { +func (g *GithubClient) GetPullLabels(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) { + logger.Debug("Getting labels for GitHub pull request %d", pull.Num) pullDetails, resp, err := g.client.PullRequests.Get(g.ctx, repo.Owner, repo.Name, pull.Num) if resp != nil { - g.logger.Debug("GET /repos/%v/%v/pulls/%d returned: %v", repo.Owner, repo.Name, pull.Num, resp.StatusCode) + logger.Debug("GET /repos/%v/%v/pulls/%d returned: %v", repo.Owner, repo.Name, pull.Num, resp.StatusCode) } if err != nil { return nil, err diff --git a/server/events/vcs/github_client_internal_test.go b/server/events/vcs/github_client_internal_test.go index 8809b0a44a..63f7a73e6b 100644 --- a/server/events/vcs/github_client_internal_test.go +++ b/server/events/vcs/github_client_internal_test.go @@ -22,14 +22,14 @@ import ( // If the hostname is github.com, should use normal BaseURL. func TestNewGithubClient_GithubCom(t *testing.T) { - client, err := NewGithubClient("github.com", &GithubUserCredentials{"user", "pass"}, GithubConfig{}, logging.NewNoopLogger(t)) + client, err := NewGithubClient("github.com", &GithubUserCredentials{"user", "pass", ""}, GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) Equals(t, "https://api.github.com/", client.client.BaseURL.String()) } // If the hostname is a non-github hostname should use the right BaseURL. func TestNewGithubClient_NonGithub(t *testing.T) { - client, err := NewGithubClient("example.com", &GithubUserCredentials{"user", "pass"}, GithubConfig{}, logging.NewNoopLogger(t)) + client, err := NewGithubClient("example.com", &GithubUserCredentials{"user", "pass", ""}, GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) Equals(t, "https://example.com/api/v3/", client.client.BaseURL.String()) // If possible in the future, test the GraphQL client's URL as well. 
diff --git a/server/events/vcs/github_client_test.go b/server/events/vcs/github_client_test.go index acb7d07b10..81ec7ee7a4 100644 --- a/server/events/vcs/github_client_test.go +++ b/server/events/vcs/github_client_test.go @@ -63,23 +63,25 @@ func TestGithubClient_GetModifiedFiles(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logger) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass", ""}, vcs.GithubConfig{}, 0, logger) Ok(t, err) defer disableSSLVerification()() - files, err := client.GetModifiedFiles(models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.Github, - Hostname: "github.com", - }, - }, models.PullRequest{ - Num: 1, - }) + files, err := client.GetModifiedFiles( + logger, + models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + CloneURL: "", + SanitizedCloneURL: "", + VCSHost: models.VCSHost{ + Type: models.Github, + Hostname: "github.com", + }, + }, models.PullRequest{ + Num: 1, + }) Ok(t, err) Equals(t, []string{"file1.txt", "file2.txt"}, files) } @@ -87,6 +89,7 @@ func TestGithubClient_GetModifiedFiles(t *testing.T) { // GetModifiedFiles should include the source and destination of a moved // file. func TestGithubClient_GetModifiedFilesMovedFile(t *testing.T) { + logger := logging.NewNoopLogger(t) resp := `[ { "sha": "bbcd538c8e72b8c175046e27cc8f907076331401", @@ -118,28 +121,31 @@ func TestGithubClient_GetModifiedFilesMovedFile(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass", ""}, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) defer disableSSLVerification()() - files, err := client.GetModifiedFiles(models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.Github, - Hostname: "github.com", - }, - }, models.PullRequest{ - Num: 1, - }) + files, err := client.GetModifiedFiles( + logger, + models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + CloneURL: "", + SanitizedCloneURL: "", + VCSHost: models.VCSHost{ + Type: models.Github, + Hostname: "github.com", + }, + }, models.PullRequest{ + Num: 1, + }) Ok(t, err) Equals(t, []string{"new/filename.txt", "previous/filename.txt"}, files) } func TestGithubClient_PaginatesComments(t *testing.T) { + logger := logging.NewNoopLogger(t) calls := 0 issueResps := []string{ `[ @@ -212,11 +218,12 @@ func TestGithubClient_PaginatesComments(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass", ""}, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) defer disableSSLVerification()() err = client.HidePrevCommandComments( + logger, models.Repo{ FullName: "owner/repo", Owner: "owner", @@ -241,6 +248,7 @@
} func TestGithubClient_HideOldComments(t *testing.T) { + logger := logging.NewNoopLogger(t) atlantisUser := "AtlantisUser" pullRequestNum := 123 issueResp := strings.ReplaceAll(`[ @@ -326,12 +334,13 @@ func TestGithubClient_HideOldComments(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{atlantisUser, "pass"}, vcs.GithubConfig{}, + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{atlantisUser, "pass", ""}, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) defer disableSSLVerification()() err = client.HidePrevCommandComments( + logger, models.Repo{ FullName: "owner/repo", Owner: "owner", @@ -358,6 +367,7 @@ func TestGithubClient_HideOldComments(t *testing.T) { } func TestGithubClient_UpdateStatus(t *testing.T) { + logger := logging.NewNoopLogger(t) cases := []struct { status models.CommitStatus expState string @@ -397,29 +407,32 @@ func TestGithubClient_UpdateStatus(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass", ""}, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) defer disableSSLVerification()() - err = client.UpdateStatus(models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.Github, - Hostname: "github.com", - }, - }, models.PullRequest{ - Num: 1, - }, c.status, "src", "description", "https://google.com") + err = client.UpdateStatus( + logger, + models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + CloneURL: "", + SanitizedCloneURL: "", + VCSHost: models.VCSHost{ + Type: models.Github, + Hostname: "github.com", + }, + }, models.PullRequest{ + Num: 1, + }, c.status, "src", "description", "https://google.com") Ok(t, err) }) } } func TestGithubClient_PullIsApproved(t *testing.T) { + logger := logging.NewNoopLogger(t) respTemplate := `[ { "id": %d, @@ -483,28 +496,31 @@ func TestGithubClient_PullIsApproved(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass", ""}, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) defer disableSSLVerification()() - approvalStatus, err := client.PullIsApproved(models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.Github, - Hostname: "github.com", - }, - }, models.PullRequest{ - Num: 1, - }) + approvalStatus, err := client.PullIsApproved( + logger, + models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + CloneURL: "", + SanitizedCloneURL: "", + VCSHost: models.VCSHost{ + Type: models.Github, + Hostname: "github.com", + }, + }, models.PullRequest{ + Num: 1, + }) Ok(t, err) Equals(t, false, approvalStatus.IsApproved) } func TestGithubClient_PullIsMergeable(t *testing.T) { + logger := logging.NewNoopLogger(t) vcsStatusName := "atlantis-test" cases := []struct { state string @@ -553,22 +569,6 @@ func 
TestGithubClient_PullIsMergeable(t *testing.T) { Ok(t, err) prJSON := string(jsBytes) - // Status Check Response - jsBytes, err = os.ReadFile("testdata/github-commit-status-full.json") - Ok(t, err) - commitJSON := string(jsBytes) - - //reviewDecision Response - reviewDecision := `{ - "data": { - "repository": { - "pullRequest": { - "reviewDecision": "REVIEW_REQUIRED" - } - } - } - }` - for _, c := range cases { t.Run(c.state, func(t *testing.T) { response := strings.Replace(prJSON, @@ -583,13 +583,6 @@ func TestGithubClient_PullIsMergeable(t *testing.T) { case "/api/v3/repos/owner/repo/pulls/1": w.Write([]byte(response)) // nolint: errcheck return - case "/api/v3/repos/owner/repo/pulls/1/reviews?per_page=300": - w.Write([]byte("[]")) // nolint: errcheck - return - case "/api/v3/repos/owner/repo/commits/new-topic/status": - w.Write([]byte(commitJSON)) // nolint: errcheck - case "/api/graphql": - w.Write([]byte(reviewDecision)) // nolint: errcheck default: t.Errorf("got unexpected request at %q", r.RequestURI) http.Error(w, "not found", http.StatusNotFound) @@ -598,23 +591,25 @@ func TestGithubClient_PullIsMergeable(t *testing.T) { })) testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass", ""}, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) defer disableSSLVerification()() - actMergeable, err := client.PullIsMergeable(models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.Github, - Hostname: "github.com", - }, - }, models.PullRequest{ - Num: 1, - }, vcsStatusName) + actMergeable, err := client.PullIsMergeable( + logger, + models.Repo{ + FullName: "owner/repo", + Owner: "owner", + Name: "repo", + CloneURL: "", + SanitizedCloneURL: "", + VCSHost: models.VCSHost{ + Type: models.Github, + Hostname: "github.com", + }, + }, models.PullRequest{ + Num: 1, + }, vcsStatusName, []string{}) Ok(t, err) Equals(t, c.expMergeable, actMergeable) }) @@ -622,166 +617,199 @@ func TestGithubClient_PullIsMergeable(t *testing.T) { } func TestGithubClient_PullIsMergeableWithAllowMergeableBypassApply(t *testing.T) { + logger := logging.NewNoopLogger(t) vcsStatusName := "atlantis" + ignoreVCSStatusNames := []string{"other-atlantis"} cases := []struct { - state string - reviewDecision string - expMergeable bool + state string + statusCheckRollupFilePath string + reviewDecision string + expMergeable bool }{ { "dirty", + "ruleset-atlantis-apply-pending.json", `"REVIEW_REQUIRED"`, false, }, { "unknown", + "ruleset-atlantis-apply-pending.json", `"REVIEW_REQUIRED"`, false, }, { "blocked", + "ruleset-atlantis-apply-pending.json", `"REVIEW_REQUIRED"`, false, }, { "blocked", + "ruleset-atlantis-apply-pending.json", `"APPROVED"`, true, }, { "blocked", + "ruleset-atlantis-apply-pending.json", "null", true, }, { "behind", + "ruleset-atlantis-apply-pending.json", `"REVIEW_REQUIRED"`, false, }, { "random", + "ruleset-atlantis-apply-pending.json", `"REVIEW_REQUIRED"`, false, }, { "unstable", + "ruleset-atlantis-apply-pending.json", `"REVIEW_REQUIRED"`, true, }, { "has_hooks", + "ruleset-atlantis-apply-pending.json", `"APPROVED"`, true, }, { "clean", + "ruleset-atlantis-apply-pending.json", `"APPROVED"`, true, }, { "", + "ruleset-atlantis-apply-pending.json", `"APPROVED"`, 
false, }, - } - - // Use a real GitHub json response and edit the mergeable_state field. - jsBytes, err := os.ReadFile("testdata/github-pull-request.json") - Ok(t, err) - prJSON := string(jsBytes) - - // Status Check Response - jsBytes, err = os.ReadFile("testdata/github-commit-status-full.json") - Ok(t, err) - commitJSON := string(jsBytes) - - // Branch protection Response - jsBytes, err = os.ReadFile("testdata/github-branch-protection-required-checks.json") - Ok(t, err) - branchProtectionJSON := string(jsBytes) - - // List check suites Response - jsBytes, err = os.ReadFile("testdata/github-commit-check-suites.json") - Ok(t, err) - checkSuites := string(jsBytes) - - for _, c := range cases { - t.Run(c.state, func(t *testing.T) { - response := strings.Replace(prJSON, - `"mergeable_state": "clean"`, - fmt.Sprintf(`"mergeable_state": "%s"`, c.state), - 1, - ) - - // reviewDecision Response - reviewDecision := fmt.Sprintf(`{ - "data": { - "repository": { - "pullRequest": { - "reviewDecision": %s - } - } - } - }`, c.reviewDecision) - - testServer := httptest.NewTLSServer( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - switch r.RequestURI { - case "/api/v3/repos/octocat/repo/pulls/1": - w.Write([]byte(response)) // nolint: errcheck - return - case "/api/v3/repos/octocat/repo/pulls/1/reviews?per_page=300": - w.Write([]byte("[]")) // nolint: errcheck - return - case "/api/v3/repos/octocat/repo/commits/new-topic/status": - w.Write([]byte(commitJSON)) // nolint: errcheck - case "/api/graphql": - w.Write([]byte(reviewDecision)) // nolint: errcheck - case "/api/v3/repos/octocat/repo/branches/main/protection": - w.Write([]byte(branchProtectionJSON)) // nolint: errcheck - case "/api/v3/repos/octocat/repo/commits/new-topic/check-suites": - w.Write([]byte(checkSuites)) // nolint: errcheck - default: - t.Errorf("got unexpected request at %q", r.RequestURI) - http.Error(w, "not found", http.StatusNotFound) - return - } - })) - testServerURL, err := url.Parse(testServer.URL) - Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{AllowMergeableBypassApply: true}, logging.NewNoopLogger(t)) - Ok(t, err) - defer disableSSLVerification()() - - actMergeable, err := client.PullIsMergeable(models.Repo{ - FullName: "octocat/repo", - Owner: "octocat", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.Github, - Hostname: "github.com", - }, - }, models.PullRequest{ - Num: 1, - }, vcsStatusName) - Ok(t, err) - Equals(t, c.expMergeable, actMergeable) - }) - } -} - -func TestGithubClient_PullIsMergeableWithAllowMergeableBypassApplyButWithNoBranchProtectionChecks(t *testing.T) { - vcsStatusName := "atlantis" - cases := []struct { - state string - reviewDecision string - expMergeable bool - }{ { "blocked", - `"REVIEW_REQUIRED"`, + "ruleset-atlantis-apply-expected.json", + `"APPROVED"`, + true, + }, + { + "blocked", + "ruleset-optional-check-failed.json", + `"APPROVED"`, + true, + }, + { + "blocked", + "ruleset-optional-status-failed.json", + `"APPROVED"`, + true, + }, + { + "blocked", + "ruleset-check-pending.json", + `"APPROVED"`, + false, + }, + { + "blocked", + "ruleset-check-pending-other-atlantis.json", + `"APPROVED"`, + true, + }, + { + "blocked", + "ruleset-check-skipped.json", + `"APPROVED"`, + true, + }, + { + "blocked", + "ruleset-check-neutral.json", + `"APPROVED"`, + true, + }, + { + "blocked", + "ruleset-evaluate-workflow-failed.json", + `"APPROVED"`, + true, + }, + { 
+ "blocked", + "branch-protection-expected.json", + `"APPROVED"`, + false, + }, + { + "blocked", + "branch-protection-failed.json", + `"APPROVED"`, + false, + }, + { + "blocked", + "branch-protection-passed.json", + `"APPROVED"`, + true, + }, + { + "blocked", + "ruleset-check-expected.json", + `"APPROVED"`, + false, + }, + { + "blocked", + "ruleset-check-failed.json", + `"APPROVED"`, + false, + }, + { + "blocked", + "ruleset-check-failed-other-atlantis.json", + `"APPROVED"`, + true, + }, + { + "blocked", + "ruleset-check-passed.json", + `"APPROVED"`, + true, + }, + { + "blocked", + "ruleset-workflow-expected.json", + `"APPROVED"`, + false, + }, + { + "blocked", + "ruleset-workflow-failed.json", + `"APPROVED"`, + false, + }, + { + "blocked", + "ruleset-workflow-passed.json", + `"APPROVED"`, + true, + }, + { + "blocked", + "ruleset-workflow-passed-sha-match.json", + `"APPROVED"`, + true, + }, + { + "blocked", + "ruleset-workflow-passed-sha-mismatch.json", + `"APPROVED"`, false, }, } @@ -791,25 +819,9 @@ func TestGithubClient_PullIsMergeableWithAllowMergeableBypassApplyButWithNoBranc Ok(t, err) prJSON := string(jsBytes) - // Status Check Response - jsBytes, err = os.ReadFile("testdata/github-commit-status-full.json") - Ok(t, err) - commitJSON := string(jsBytes) - - // Branch protection Response - jsBytes, err = os.ReadFile("testdata/github-branch-protection-no-required-checks.json") - Ok(t, err) - branchProtectionJSON := string(jsBytes) - - // List check suites Response - jsBytes, err = os.ReadFile("testdata/github-commit-check-suites-completed.json") + jsBytes, err = os.ReadFile("testdata/github-pull-request-mergeability/repository-id.json") Ok(t, err) - checkSuites := string(jsBytes) - - // List check runs in a check suite - jsBytes, err = os.ReadFile("testdata/github-commit-check-suites-check-runs-completed.json") - Ok(t, err) - checkRuns := string(jsBytes) + repoIdJSON := string(jsBytes) for _, c := range cases { t.Run(c.state, func(t *testing.T) { @@ -819,36 +831,41 @@ func TestGithubClient_PullIsMergeableWithAllowMergeableBypassApplyButWithNoBranc 1, ) - // reviewDecision Response - reviewDecision := fmt.Sprintf(`{ - "data": { - "repository": { - "pullRequest": { - "reviewDecision": %s - } - } - } - }`, c.reviewDecision) + // PR review decision and checks statuses Response + jsBytes, err = os.ReadFile("testdata/github-pull-request-mergeability/" + c.statusCheckRollupFilePath) + Ok(t, err) + prMergeableStatusJSON := string(jsBytes) + + // PR review decision and checks statuses Response + prMergeableStatus := strings.Replace(prMergeableStatusJSON, + `"reviewDecision": null,`, + fmt.Sprintf(`"reviewDecision": %s,`, c.reviewDecision), + 1, + ) testServer := httptest.NewTLSServer( http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch r.RequestURI { - case "/api/v3/repos/octocat/Hello-World/pulls/1": + case "/api/v3/repos/octocat/repo/pulls/1": w.Write([]byte(response)) // nolint: errcheck return - case "/api/v3/repos/octocat/Hello-World/pulls/1/reviews?per_page=300": - w.Write([]byte("[]")) // nolint: errcheck - return - case "/api/v3/repos/octocat/Hello-World/commits/new-topic/status": - w.Write([]byte(commitJSON)) // nolint: errcheck case "/api/graphql": - w.Write([]byte(reviewDecision)) // nolint: errcheck - case "/api/v3/repos/octocat/Hello-World/branches/main/protection": - w.Write([]byte(branchProtectionJSON)) // nolint: errcheck - case "/api/v3/repos/octocat/Hello-World/commits/new-topic/check-suites": - w.Write([]byte(checkSuites)) // nolint: errcheck - case 
"/api/v3/repos/octocat/Hello-World/check-suites/1234567890/check-runs": - w.Write([]byte(checkRuns)) // nolint: errcheck + body, err := io.ReadAll(r.Body) + if err != nil { + t.Errorf("read body error: %v", err) + http.Error(w, "", http.StatusInternalServerError) + return + } + if strings.Contains(string(body), "pullRequest(") { + w.Write([]byte(prMergeableStatus)) // nolint: errcheck + return + } else if strings.Contains(string(body), "databaseId") { + w.Write([]byte(repoIdJSON)) // nolint: errcheck + return + } + t.Errorf("got unexpected request at %q", r.RequestURI) + http.Error(w, "not found", http.StatusNotFound) + return default: t.Errorf("got unexpected request at %q", r.RequestURI) http.Error(w, "not found", http.StatusNotFound) @@ -857,23 +874,25 @@ func TestGithubClient_PullIsMergeableWithAllowMergeableBypassApplyButWithNoBranc })) testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{AllowMergeableBypassApply: true}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass", ""}, vcs.GithubConfig{AllowMergeableBypassApply: true}, 0, logging.NewNoopLogger(t)) Ok(t, err) defer disableSSLVerification()() - actMergeable, err := client.PullIsMergeable(models.Repo{ - FullName: "octocat/Hello-World", - Owner: "octocat", - Name: "Hello-World", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.Github, - Hostname: "github.com", - }, - }, models.PullRequest{ - Num: 1, - }, vcsStatusName) + actMergeable, err := client.PullIsMergeable( + logger, + models.Repo{ + FullName: "octocat/repo", + Owner: "octocat", + Name: "repo", + CloneURL: "", + SanitizedCloneURL: "", + VCSHost: models.VCSHost{ + Type: models.Github, + Hostname: "github.com", + }, + }, models.PullRequest{ + Num: 1, + }, vcsStatusName, ignoreVCSStatusNames) Ok(t, err) Equals(t, c.expMergeable, actMergeable) }) @@ -881,6 +900,7 @@ func TestGithubClient_PullIsMergeableWithAllowMergeableBypassApplyButWithNoBranc } func TestGithubClient_MergePullHandlesError(t *testing.T) { + logger := logging.NewNoopLogger(t) cases := []struct { code int message string @@ -939,11 +959,12 @@ func TestGithubClient_MergePullHandlesError(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass", ""}, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) defer disableSSLVerification()() err = client.MergePull( + logger, models.PullRequest{ BaseRepo: models.Repo{ FullName: "owner/repo", @@ -973,11 +994,14 @@ func TestGithubClient_MergePullHandlesError(t *testing.T) { // Test that if the pull request only allows a certain merge method that we // use that method func TestGithubClient_MergePullCorrectMethod(t *testing.T) { + logger := logging.NewNoopLogger(t) cases := map[string]struct { - allowMerge bool - allowRebase bool - allowSquash bool - expMethod string + allowMerge bool + allowRebase bool + allowSquash bool + mergeMethodOption string + expMethod string + expErr string }{ "all true": { allowMerge: true, @@ -1009,6 +1033,59 @@ func TestGithubClient_MergePullCorrectMethod(t *testing.T) { allowSquash: false, expMethod: "rebase", }, + "all true: merge with merge: overrided 
by command": { + allowMerge: true, + allowRebase: true, + allowSquash: true, + mergeMethodOption: "merge", + expMethod: "merge", + }, + "all true: merge with rebase: overrided by command": { + allowMerge: true, + allowRebase: true, + allowSquash: true, + mergeMethodOption: "rebase", + expMethod: "rebase", + }, + "all true: merge with squash: overrided by command": { + allowMerge: true, + allowRebase: true, + allowSquash: true, + mergeMethodOption: "squash", + expMethod: "squash", + }, + "merge with merge: overridden by command: merge not allowed": { + allowMerge: false, + allowRebase: true, + allowSquash: true, + mergeMethodOption: "merge", + expMethod: "", + expErr: "Merge method 'merge' is not allowed by the repository Pull Request settings", + }, + "merge with rebase: overridden by command: rebase not allowed": { + allowMerge: true, + allowRebase: false, + allowSquash: true, + mergeMethodOption: "rebase", + expMethod: "", + expErr: "Merge method 'rebase' is not allowed by the repository Pull Request settings", + }, + "merge with squash: overridden by command: squash not allowed": { + allowMerge: true, + allowRebase: true, + allowSquash: false, + mergeMethodOption: "squash", + expMethod: "", + expErr: "Merge method 'squash' is not allowed by the repository Pull Request settings", + }, + "merge with unknown: overridden by command: unknown doesn't exist": { + allowMerge: true, + allowRebase: true, + allowSquash: true, + mergeMethodOption: "unknown", + expMethod: "", + expErr: "Merge method 'unknown' is unknown. Specify one of the valid values: 'merge, rebase, squash'", + }, } for name, c := range cases { @@ -1062,11 +1139,12 @@ func TestGithubClient_MergePullCorrectMethod(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass", ""}, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) defer disableSSLVerification()() err = client.MergePull( + logger, models.PullRequest{ BaseRepo: models.Repo{ FullName: "runatlantis/atlantis", @@ -1082,15 +1160,20 @@ func TestGithubClient_MergePullCorrectMethod(t *testing.T) { Num: 1, }, models.PullRequestOptions{ DeleteSourceBranchOnMerge: false, + MergeMethod: c.mergeMethodOption, }) - Ok(t, err) + if c.expErr == "" { + Ok(t, err) + } else { + ErrContains(t, c.expErr, err) + } }) } } func TestGithubClient_MarkdownPullLink(t *testing.T) { - client, err := vcs.NewGithubClient("hostname", &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient("hostname", &vcs.GithubUserCredentials{"user", "pass", ""}, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) pull := models.PullRequest{Num: 1} s, _ := client.MarkdownPullLink(pull) @@ -1110,6 +1193,7 @@ func disableSSLVerification() func() { } func TestGithubClient_SplitComments(t *testing.T) { + logger := logging.NewNoopLogger(t) type githubComment struct { Body string `json:"body"` } @@ -1145,7 +1229,7 @@ func TestGithubClient_SplitComments(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass", ""}, 
vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) defer disableSSLVerification()() pull := models.PullRequest{Num: 1} @@ -1162,9 +1246,9 @@ func TestGithubClient_SplitComments(t *testing.T) { } // create an extra long string comment := strings.Repeat("a", 65537) - err = client.CreateComment(repo, pull.Num, comment, command.Plan.String()) + err = client.CreateComment(logger, repo, pull.Num, comment, command.Plan.String()) Ok(t, err) - err = client.CreateComment(repo, pull.Num, comment, "") + err = client.CreateComment(logger, repo, pull.Num, comment, "") Ok(t, err) body := strings.Split(githubComments[1].Body, "\n") @@ -1179,6 +1263,7 @@ func TestGithubClient_SplitComments(t *testing.T) { // Test that we retry the get pull request call if it 404s. func TestGithubClient_Retry404(t *testing.T) { + logger := logging.NewNoopLogger(t) var numCalls = 0 testServer := httptest.NewTLSServer( @@ -1203,7 +1288,7 @@ func TestGithubClient_Retry404(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass", ""}, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) defer disableSSLVerification()() repo := models.Repo{ @@ -1217,13 +1302,14 @@ func TestGithubClient_Retry404(t *testing.T) { Hostname: "github.com", }, } - _, err = client.GetPullRequest(repo, 1) + _, err = client.GetPullRequest(logger, repo, 1) Ok(t, err) Equals(t, 3, numCalls) } // Test that we retry the get pull request files call if it 404s. func TestGithubClient_Retry404Files(t *testing.T) { + logger := logging.NewNoopLogger(t) var numCalls = 0 testServer := httptest.NewTLSServer( @@ -1248,7 +1334,7 @@ func TestGithubClient_Retry404Files(t *testing.T) { testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass", ""}, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) defer disableSSLVerification()() repo := models.Repo{ @@ -1263,7 +1349,7 @@ func TestGithubClient_Retry404Files(t *testing.T) { }, } pr := models.PullRequest{Num: 1} - _, err = client.GetModifiedFiles(repo, pr) + _, err = client.GetModifiedFiles(logger, repo, pr) Ok(t, err) Equals(t, 3, numCalls) } @@ -1301,7 +1387,7 @@ func TestGithubClient_GetTeamNamesForUser(t *testing.T) { })) testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logger) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass", ""}, vcs.GithubConfig{}, 0, logger) Ok(t, err) defer disableSSLVerification()() @@ -1499,7 +1585,7 @@ func TestGithubClient_DiscardReviews(t *testing.T) { })) testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass", ""}, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) defer disableSSLVerification()() if err := 
client.DiscardReviews(tt.args.repo, tt.args.pull); (err != nil) != tt.wantErr { @@ -1568,16 +1654,18 @@ func TestGithubClient_GetPullLabels(t *testing.T) { })) testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logger) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass", ""}, vcs.GithubConfig{}, 0, logger) Ok(t, err) defer disableSSLVerification()() - labels, err := client.GetPullLabels(models.Repo{ - Owner: "runatlantis", - Name: "atlantis", - }, models.PullRequest{ - Num: 1, - }) + labels, err := client.GetPullLabels( + logger, + models.Repo{ + Owner: "runatlantis", + Name: "atlantis", + }, models.PullRequest{ + Num: 1, + }) Ok(t, err) Equals(t, []string{"docs", "go", "needs tests", "work-in-progress"}, labels) } @@ -1603,16 +1691,19 @@ func TestGithubClient_GetPullLabels_EmptyResponse(t *testing.T) { })) testServerURL, err := url.Parse(testServer.URL) Ok(t, err) - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logger) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass", ""}, vcs.GithubConfig{}, 0, logger) Ok(t, err) defer disableSSLVerification()() - labels, err := client.GetPullLabels(models.Repo{ - Owner: "runatlantis", - Name: "atlantis", - }, models.PullRequest{ - Num: 1, - }) + labels, err := client.GetPullLabels( + logger, + models.Repo{ + Owner: "runatlantis", + Name: "atlantis", + }, + models.PullRequest{ + Num: 1, + }) Ok(t, err) Equals(t, 0, len(labels)) } diff --git a/server/events/vcs/github_credentials.go b/server/events/vcs/github_credentials.go index 21d4296256..4b322fa6cb 100644 --- a/server/events/vcs/github_credentials.go +++ b/server/events/vcs/github_credentials.go @@ -5,10 +5,11 @@ import ( "fmt" "net/http" "net/url" + "os" "strings" "github.com/bradleyfalzon/ghinstallation/v2" - "github.com/google/go-github/v58/github" + "github.com/google/go-github/v65/github" "github.com/pkg/errors" ) @@ -42,17 +43,45 @@ func (c *GithubAnonymousCredentials) GetToken() (string, error) { // GithubUserCredentials implements GithubCredentials for the personal auth token flow. type GithubUserCredentials struct { - User string - Token string + User string + Token string + TokenFile string +} + +type GitHubUserTransport struct { + Credentials *GithubUserCredentials + Transport *github.BasicAuthTransport +} + +func (t *GitHubUserTransport) RoundTrip(req *http.Request) (*http.Response, error) { + // update token + token, err := t.Credentials.GetToken() + if err != nil { + return nil, err + } + t.Transport.Password = token + + // defer to the underlying transport + return t.Transport.RoundTrip(req) } // Client returns a client for basic auth user credentials. func (c *GithubUserCredentials) Client() (*http.Client, error) { - tr := &github.BasicAuthTransport{ - Username: strings.TrimSpace(c.User), - Password: strings.TrimSpace(c.Token), + password, err := c.GetToken() + if err != nil { + return nil, err + } + + client := &http.Client{ + Transport: &GitHubUserTransport{ + Credentials: c, + Transport: &github.BasicAuthTransport{ + Username: strings.TrimSpace(c.User), + Password: strings.TrimSpace(password), + }, + }, } - return tr.Client(), nil + return client, nil } // GetUser returns the username for these credentials. 
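The `GitHubUserTransport` added above re-resolves the token on every `RoundTrip`, which is what lets a `TokenFile` that an external process rewrites take effect without restarting the server. A minimal sketch of the same pattern outside Atlantis; the `fileTokenTransport` type, the token path, and the bearer header are illustrative stand-ins, not part of this diff:

```go
package main

import (
	"fmt"
	"net/http"
	"os"
	"strings"
)

// fileTokenTransport re-reads a token file on every request, so an
// externally rotated credential is picked up without a restart.
type fileTokenTransport struct {
	tokenPath string            // kept fresh by some external process (illustrative)
	base      http.RoundTripper // underlying transport that does the real work
}

func (t *fileTokenTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	token, err := os.ReadFile(t.tokenPath)
	if err != nil {
		return nil, fmt.Errorf("failed reading token file: %w", err)
	}
	// Clone before mutating headers: RoundTrippers must not modify the caller's request.
	r := req.Clone(req.Context())
	r.Header.Set("Authorization", "Bearer "+strings.TrimSpace(string(token)))
	return t.base.RoundTrip(r)
}

func main() {
	client := &http.Client{Transport: &fileTokenTransport{
		tokenPath: "/var/run/secrets/github-token", // illustrative path
		base:      http.DefaultTransport,
	}}
	resp, err := client.Get("https://api.github.com/rate_limit")
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}
```

Cloning the request respects the `http.RoundTripper` contract; the Atlantis version instead refreshes the password on its wrapped `BasicAuthTransport` before delegating.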
@@ -62,6 +91,15 @@ func (c *GithubUserCredentials) GetUser() (string, error) { // GetToken returns the user token. func (c *GithubUserCredentials) GetToken() (string, error) { + if c.TokenFile != "" { + content, err := os.ReadFile(c.TokenFile) + if err != nil { + return "", fmt.Errorf("failed reading github token file: %w", err) + } + + return string(content), nil + } + return c.Token, nil } @@ -71,7 +109,7 @@ type GithubAppCredentials struct { Key []byte Hostname string apiURL *url.URL - installationID int64 + InstallationID int64 tr *ghinstallation.Transport AppSlug string } @@ -122,8 +160,8 @@ func (c *GithubAppCredentials) GetToken() (string, error) { } func (c *GithubAppCredentials) getInstallationID() (int64, error) { - if c.installationID != 0 { - return c.installationID, nil + if c.InstallationID != 0 { + return c.InstallationID, nil } tr := http.DefaultTransport @@ -148,8 +186,8 @@ func (c *GithubAppCredentials) getInstallationID() (int64, error) { return 0, fmt.Errorf("wrong number of installations, expected 1, found %d", len(installations)) } - c.installationID = installations[0].GetID() - return c.installationID, nil + c.InstallationID = installations[0].GetID() + return c.InstallationID, nil } func (c *GithubAppCredentials) transport() (*ghinstallation.Transport, error) { diff --git a/server/events/vcs/github_credentials_test.go b/server/events/vcs/github_credentials_test.go index f6975056c9..e1a0f67e8c 100644 --- a/server/events/vcs/github_credentials_test.go +++ b/server/events/vcs/github_credentials_test.go @@ -10,14 +10,15 @@ import ( ) func TestGithubClient_GetUser_AppSlug(t *testing.T) { + logger := logging.NewNoopLogger(t) defer disableSSLVerification()() testServer, err := testdata.GithubAppTestServer(t) Ok(t, err) anonCreds := &vcs.GithubAnonymousCredentials{} - anonClient, err := vcs.NewGithubClient(testServer, anonCreds, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + anonClient, err := vcs.NewGithubClient(testServer, anonCreds, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) - tempSecrets, err := anonClient.ExchangeCode("good-code") + tempSecrets, err := anonClient.ExchangeCode(logger, "good-code") Ok(t, err) appCreds := &vcs.GithubAppCredentials{ @@ -34,14 +35,15 @@ func TestGithubClient_GetUser_AppSlug(t *testing.T) { } func TestGithubClient_AppAuthentication(t *testing.T) { + logger := logging.NewNoopLogger(t) defer disableSSLVerification()() testServer, err := testdata.GithubAppTestServer(t) Ok(t, err) anonCreds := &vcs.GithubAnonymousCredentials{} - anonClient, err := vcs.NewGithubClient(testServer, anonCreds, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + anonClient, err := vcs.NewGithubClient(testServer, anonCreds, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) - tempSecrets, err := anonClient.ExchangeCode("good-code") + tempSecrets, err := anonClient.ExchangeCode(logger, "good-code") Ok(t, err) appCreds := &vcs.GithubAppCredentials{ @@ -49,7 +51,44 @@ func TestGithubClient_AppAuthentication(t *testing.T) { Key: []byte(testdata.GithubPrivateKey), Hostname: testServer, } - _, err = vcs.NewGithubClient(testServer, appCreds, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + _, err = vcs.NewGithubClient(testServer, appCreds, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) + Ok(t, err) + + token, err := appCreds.GetToken() + Ok(t, err) + + newToken, err := appCreds.GetToken() + Ok(t, err) + + user, err := appCreds.GetUser() + Ok(t, err) + + Assert(t, user == "", "user should be empty") + + if token != newToken { + t.Errorf("app token was 
not cached: %q != %q", token, newToken) + } +} + +func TestGithubClient_MultipleAppAuthentication(t *testing.T) { + logger := logging.NewNoopLogger(t) + defer disableSSLVerification()() + testServer, err := testdata.GithubMultipleAppTestServer(t) + Ok(t, err) + + anonCreds := &vcs.GithubAnonymousCredentials{} + anonClient, err := vcs.NewGithubClient(testServer, anonCreds, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) + Ok(t, err) + tempSecrets, err := anonClient.ExchangeCode(logger, "good-code") + Ok(t, err) + + appCreds := &vcs.GithubAppCredentials{ + AppID: tempSecrets.ID, + InstallationID: 1, + Key: []byte(testdata.GithubPrivateKey), + Hostname: testServer, + } + _, err = vcs.NewGithubClient(testServer, appCreds, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) token, err := appCreds.GetToken() diff --git a/server/events/vcs/gh_app_creds_rotator.go b/server/events/vcs/github_token_rotator.go similarity index 60% rename from server/events/vcs/gh_app_creds_rotator.go rename to server/events/vcs/github_token_rotator.go index 6522f33118..2b184bd6b8 100644 --- a/server/events/vcs/gh_app_creds_rotator.go +++ b/server/events/vcs/github_token_rotator.go @@ -8,37 +8,40 @@ import ( "github.com/runatlantis/atlantis/server/scheduled" ) -// GitCredsTokenRotator continuously tries to rotate the github app access token every 30 seconds and writes the ~/.git-credentials file -type GitCredsTokenRotator interface { +// GithubTokenRotator continuously tries to rotate the github app access token every 30 seconds and writes the ~/.git-credentials file +type GithubTokenRotator interface { Run() GenerateJob() (scheduled.JobDefinition, error) } -type githubAppTokenRotator struct { +type githubTokenRotator struct { log logging.SimpleLogging githubCredentials GithubCredentials githubHostname string + gitUser string homeDirPath string } -func NewGithubAppTokenRotator( +func NewGithubTokenRotator( log logging.SimpleLogging, githubCredentials GithubCredentials, githubHostname string, - homeDirPath string) GitCredsTokenRotator { + gitUser string, + homeDirPath string) GithubTokenRotator { - return &githubAppTokenRotator{ + return &githubTokenRotator{ log: log, githubCredentials: githubCredentials, githubHostname: githubHostname, + gitUser: gitUser, homeDirPath: homeDirPath, } } // make sure interface is implemented correctly -var _ GitCredsTokenRotator = (*githubAppTokenRotator)(nil) +var _ GithubTokenRotator = (*githubTokenRotator)(nil) -func (r *githubAppTokenRotator) GenerateJob() (scheduled.JobDefinition, error) { +func (r *githubTokenRotator) GenerateJob() (scheduled.JobDefinition, error) { return scheduled.JobDefinition{ Job: r, @@ -46,7 +49,7 @@ func (r *githubAppTokenRotator) GenerateJob() (scheduled.JobDefinition, error) { }, r.rotate() } -func (r *githubAppTokenRotator) Run() { +func (r *githubTokenRotator) Run() { err := r.rotate() if err != nil { // at least log the error message here, as we want to notify the user that the key rotation wasn't successful @@ -54,17 +57,17 @@ func (r *githubAppTokenRotator) Run() { } } -func (r *githubAppTokenRotator) rotate() error { - r.log.Debug("Refreshing git tokens for Github App") +func (r *githubTokenRotator) rotate() error { + r.log.Debug("Refreshing Github tokens for .git-credentials") token, err := r.githubCredentials.GetToken() if err != nil { return errors.Wrap(err, "Getting github token") } - r.log.Debug("token %s", token) + r.log.Debug("Token successfully refreshed") // 
https://developer.github.com/apps/building-github-apps/authenticating-with-github-apps/#http-based-git-access-by-an-installation - if err := WriteGitCreds("x-access-token", token, r.githubHostname, r.homeDirPath, r.log, true); err != nil { + if err := WriteGitCreds(r.gitUser, token, r.githubHostname, r.homeDirPath, r.log, true); err != nil { return errors.Wrap(err, "Writing ~/.git-credentials file") } return nil diff --git a/server/events/vcs/gh_app_creds_rotator_test.go b/server/events/vcs/github_token_rotator_test.go similarity index 83% rename from server/events/vcs/gh_app_creds_rotator_test.go rename to server/events/vcs/github_token_rotator_test.go index e0d2e28bf4..19e30b95f1 100644 --- a/server/events/vcs/gh_app_creds_rotator_test.go +++ b/server/events/vcs/github_token_rotator_test.go @@ -13,15 +13,16 @@ import ( . "github.com/runatlantis/atlantis/testing" ) -func Test_githubAppTokenRotator_GenerateJob(t *testing.T) { +func Test_githubTokenRotator_GenerateJob(t *testing.T) { + logger := logging.NewNoopLogger(t) defer disableSSLVerification()() testServer, err := testdata.GithubAppTestServer(t) Ok(t, err) anonCreds := &vcs.GithubAnonymousCredentials{} - anonClient, err := vcs.NewGithubClient(testServer, anonCreds, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + anonClient, err := vcs.NewGithubClient(testServer, anonCreds, vcs.GithubConfig{}, 0, logging.NewNoopLogger(t)) Ok(t, err) - tempSecrets, err := anonClient.ExchangeCode("good-code") + tempSecrets, err := anonClient.ExchangeCode(logger, "good-code") Ok(t, err) type fields struct { githubCredentials vcs.GithubCredentials @@ -67,10 +68,10 @@ func Test_githubAppTokenRotator_GenerateJob(t *testing.T) { t.Run(tt.name, func(t *testing.T) { tmpDir := t.TempDir() t.Setenv("HOME", tmpDir) - r := vcs.NewGithubAppTokenRotator(logging.NewNoopLogger(t), tt.fields.githubCredentials, testServer, tmpDir) + r := vcs.NewGithubTokenRotator(logging.NewNoopLogger(t), tt.fields.githubCredentials, testServer, "x-access-token", tmpDir) got, err := r.GenerateJob() if (err != nil) != tt.wantErr { - t.Errorf("githubAppTokenRotator.GenerateJob() error = %v, wantErr %v", err, tt.wantErr) + t.Errorf("githubTokenRotator.GenerateJob() error = %v, wantErr %v", err, tt.wantErr) return } if tt.credsFileWritten { diff --git a/server/events/vcs/gitlab_client.go b/server/events/vcs/gitlab_client.go index d9e2b4d33c..b78c82b271 100644 --- a/server/events/vcs/gitlab_client.go +++ b/server/events/vcs/gitlab_client.go @@ -22,15 +22,14 @@ import ( "strings" "time" + "github.com/hashicorp/go-version" + "github.com/jpillora/backoff" + "github.com/pkg/errors" "github.com/runatlantis/atlantis/server/events/command" + "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/vcs/common" - - version "github.com/hashicorp/go-version" - "github.com/pkg/errors" "github.com/runatlantis/atlantis/server/logging" - - "github.com/runatlantis/atlantis/server/events/models" - gitlab "github.com/xanzy/go-gitlab" + "github.com/xanzy/go-gitlab" ) // gitlabMaxCommentLength is the maximum number of chars allowed by Gitlab in a @@ -46,8 +45,6 @@ type GitlabClient struct { PollingInterval time.Duration // PollingInterval is the total duration for which to poll, where applicable. PollingTimeout time.Duration - // logger - logger logging.SimpleLogging } // commonMarkSupported is a version constraint that is true when this version of @@ -60,10 +57,10 @@ var gitlabClientUnderTest = false // NewGitlabClient returns a valid GitLab client. 
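For context on the rotator change above: what `WriteGitCreds` maintains is a single line in `~/.git-credentials` in git's credential-store format, with the git user now configurable (`x-access-token` for app installations, the PAT user otherwise). A sketch of that line format; the token value is a placeholder, not a real credential:

```go
package main

import "fmt"

// credentialLine builds the entry git's `credential.helper store` reads
// from ~/.git-credentials: https://<user>:<token>@<host>
func credentialLine(gitUser, token, hostname string) string {
	return fmt.Sprintf("https://%s:%s@%s", gitUser, token, hostname)
}

func main() {
	// "ghs_example" is a placeholder token for illustration only.
	fmt.Println(credentialLine("x-access-token", "ghs_example", "github.com"))
}
```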
func NewGitlabClient(hostname string, token string, logger logging.SimpleLogging) (*GitlabClient, error) { + logger.Debug("Creating new GitLab client for %s", hostname) client := &GitlabClient{ PollingInterval: time.Second, PollingTimeout: time.Second * 30, - logger: logger, } // Create the client differently depending on the base URL. @@ -107,7 +104,7 @@ func NewGitlabClient(hostname string, token string, logger logging.SimpleLogging // Determine which version of GitLab is running. if !gitlabClientUnderTest { var err error - client.Version, err = client.GetVersion() + client.Version, err = client.GetVersion(logger) if err != nil { return nil, err } @@ -119,7 +116,8 @@ func NewGitlabClient(hostname string, token string, logger logging.SimpleLogging // GetModifiedFiles returns the names of files that were modified in the merge request // relative to the repo root, e.g. parent/child/file.txt. -func (g *GitlabClient) GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) { +func (g *GitlabClient) GetModifiedFiles(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) { + logger.Debug("Getting modified files for GitLab merge request %d", pull.Num) const maxPerPage = 100 var files []string nextPage := 1 @@ -140,7 +138,7 @@ func (g *GitlabClient) GetModifiedFiles(repo models.Repo, pull models.PullReques for { resp, err = g.Client.Do(req, mr) if resp != nil { - g.logger.Debug("GET %s returned: %d", apiURL, resp.StatusCode) + logger.Debug("GET %s returned: %d", apiURL, resp.StatusCode) } if err != nil { return nil, err @@ -173,16 +171,17 @@ func (g *GitlabClient) GetModifiedFiles(repo models.Repo, pull models.PullReques } // CreateComment creates a comment on the merge request. -func (g *GitlabClient) CreateComment(repo models.Repo, pullNum int, comment string, _ string) error { +func (g *GitlabClient) CreateComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, comment string, _ string) error { + logger.Debug("Creating comment on GitLab merge request %d", pullNum) sepEnd := "\n```\n
" + "\n
\n\n**Warning**: Output length greater than max comment size. Continued in next comment." sepStart := "Continued from previous comment.\n
Show Output\n\n" + "```diff\n" - comments := common.SplitComment(comment, gitlabMaxCommentLength, sepEnd, sepStart) + comments := common.SplitComment(comment, gitlabMaxCommentLength, sepEnd, sepStart, 0, "") for _, c := range comments { _, resp, err := g.Client.Notes.CreateMergeRequestNote(repo.FullName, pullNum, &gitlab.CreateMergeRequestNoteOptions{Body: gitlab.Ptr(c)}) if resp != nil { - g.logger.Debug("POST /projects/%s/merge_requests/%d/notes returned: %d", repo.FullName, pullNum, resp.StatusCode) + logger.Debug("POST /projects/%s/merge_requests/%d/notes returned: %d", repo.FullName, pullNum, resp.StatusCode) } if err != nil { return err @@ -192,20 +191,22 @@ func (g *GitlabClient) CreateComment(repo models.Repo, pullNum int, comment stri } // ReactToComment adds a reaction to a comment. -func (g *GitlabClient) ReactToComment(repo models.Repo, pullNum int, commentID int64, reaction string) error { +func (g *GitlabClient) ReactToComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, commentID int64, reaction string) error { + logger.Debug("Adding reaction '%s' to comment %d on GitLab merge request %d", reaction, commentID, pullNum) _, resp, err := g.Client.AwardEmoji.CreateMergeRequestAwardEmojiOnNote(repo.FullName, pullNum, int(commentID), &gitlab.CreateAwardEmojiOptions{Name: reaction}) if resp != nil { - g.logger.Debug("POST /projects/%s/merge_requests/%d/notes/%d/award_emoji returned: %d", repo.FullName, pullNum, commentID, resp.StatusCode) + logger.Debug("POST /projects/%s/merge_requests/%d/notes/%d/award_emoji returned: %d", repo.FullName, pullNum, commentID, resp.StatusCode) } return err } -func (g *GitlabClient) HidePrevCommandComments(repo models.Repo, pullNum int, command string, dir string) error { +func (g *GitlabClient) HidePrevCommandComments(logger logging.SimpleLogging, repo models.Repo, pullNum int, command string, dir string) error { + logger.Debug("Hiding previous command comments on GitLab merge request %d", pullNum) var allComments []*gitlab.Note nextPage := 0 for { - g.logger.Debug("/projects/%v/merge_requests/%d/notes", repo.FullName, pullNum) + logger.Debug("/projects/%v/merge_requests/%d/notes", repo.FullName, pullNum) comments, resp, err := g.Client.Notes.ListMergeRequestNotes(repo.FullName, pullNum, &gitlab.ListMergeRequestNotesOptions{ Sort: gitlab.Ptr("asc"), @@ -213,7 +214,7 @@ func (g *GitlabClient) HidePrevCommandComments(repo models.Repo, pullNum int, co ListOptions: gitlab.ListOptions{Page: nextPage}, }) if resp != nil { - g.logger.Debug("GET /projects/%s/merge_requests/%d/notes returned: %d", repo.FullName, pullNum, resp.StatusCode) + logger.Debug("GET /projects/%s/merge_requests/%d/notes returned: %d", repo.FullName, pullNum, resp.StatusCode) } if err != nil { return errors.Wrap(err, "listing comments") @@ -255,12 +256,12 @@ func (g *GitlabClient) HidePrevCommandComments(repo models.Repo, pullNum int, co continue } - g.logger.Debug("Updating merge request note: Repo: '%s', MR: '%d', comment ID: '%d'", repo.FullName, pullNum, comment.ID) + logger.Debug("Updating merge request note: Repo: '%s', MR: '%d', comment ID: '%d'", repo.FullName, pullNum, comment.ID) supersededComment := summaryHeader + lineFeed + comment.Body + lineFeed + summaryFooter + lineFeed _, resp, err := g.Client.Notes.UpdateMergeRequestNote(repo.FullName, pullNum, comment.ID, &gitlab.UpdateMergeRequestNoteOptions{Body: &supersededComment}) if resp != nil { - g.logger.Debug("PUT /projects/%s/merge_requests/%d/notes/%d returned: %d", repo.FullName, pullNum, comment.ID, 
resp.StatusCode) + logger.Debug("PUT /projects/%s/merge_requests/%d/notes/%d returned: %d", repo.FullName, pullNum, comment.ID, resp.StatusCode) } if err != nil { return errors.Wrapf(err, "updating comment %d", comment.ID) @@ -271,10 +272,11 @@ func (g *GitlabClient) HidePrevCommandComments(repo models.Repo, pullNum int, co } // PullIsApproved returns true if the merge request was approved. -func (g *GitlabClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (approvalStatus models.ApprovalStatus, err error) { +func (g *GitlabClient) PullIsApproved(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) (approvalStatus models.ApprovalStatus, err error) { + logger.Debug("Checking if GitLab merge request %d is approved", pull.Num) approvals, resp, err := g.Client.MergeRequests.GetMergeRequestApprovals(repo.FullName, pull.Num) if resp != nil { - g.logger.Debug("GET /projects/%s/merge_requests/%d/approvals returned: %d", repo.FullName, pull.Num, resp.StatusCode) + logger.Debug("GET /projects/%s/merge_requests/%d/approvals returned: %d", repo.FullName, pull.Num, resp.StatusCode) } if err != nil { return approvalStatus, err @@ -298,10 +300,11 @@ func (g *GitlabClient) PullIsApproved(repo models.Repo, pull models.PullRequest) // See: // - https://gitlab.com/gitlab-org/gitlab-ee/issues/3169 // - https://gitlab.com/gitlab-org/gitlab-ce/issues/42344 -func (g *GitlabClient) PullIsMergeable(repo models.Repo, pull models.PullRequest, vcsstatusname string) (bool, error) { +func (g *GitlabClient) PullIsMergeable(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, vcsstatusname string, _ []string) (bool, error) { + logger.Debug("Checking if GitLab merge request %d is mergeable", pull.Num) mr, resp, err := g.Client.MergeRequests.GetMergeRequest(repo.FullName, pull.Num, nil) if resp != nil { - g.logger.Debug("GET /projects/%s/merge_requests/%d returned: %d", repo.FullName, pull.Num, resp.StatusCode) + logger.Debug("GET /projects/%s/merge_requests/%d returned: %d", repo.FullName, pull.Num, resp.StatusCode) } if err != nil { return false, err @@ -319,7 +322,7 @@ func (g *GitlabClient) PullIsMergeable(repo models.Repo, pull models.PullRequest // Get project configuration project, resp, err := g.Client.Projects.GetProject(mr.ProjectID, nil) if resp != nil { - g.logger.Debug("GET /projects/%d returned: %d", mr.ProjectID, resp.StatusCode) + logger.Debug("GET /projects/%d returned: %d", mr.ProjectID, resp.StatusCode) } if err != nil { return false, err @@ -328,7 +331,7 @@ func (g *GitlabClient) PullIsMergeable(repo models.Repo, pull models.PullRequest // Get Commit Statuses statuses, _, err := g.Client.Commits.GetCommitStatuses(mr.ProjectID, commit, nil) if resp != nil { - g.logger.Debug("GET /projects/%d/commits/%s/statuses returned: %d", mr.ProjectID, commit, resp.StatusCode) + logger.Debug("GET /projects/%d/commits/%s/statuses returned: %d", mr.ProjectID, commit, resp.StatusCode) } if err != nil { return false, err @@ -346,28 +349,39 @@ func (g *GitlabClient) PullIsMergeable(repo models.Repo, pull models.PullRequest allowSkippedPipeline := project.AllowMergeOnSkippedPipeline && isPipelineSkipped - supportsDetailedMergeStatus, err := g.SupportsDetailedMergeStatus() + supportsDetailedMergeStatus, err := g.SupportsDetailedMergeStatus(logger) if err != nil { return false, err } + if supportsDetailedMergeStatus { + logger.Debug("Detailed merge status: '%s'", mr.DetailedMergeStatus) + } else { + logger.Debug("Merge status: '%s'", mr.MergeStatus) //nolint:staticcheck 
// Need to reference deprecated field for backwards compatibility + } + if ((supportsDetailedMergeStatus && (mr.DetailedMergeStatus == "mergeable" || mr.DetailedMergeStatus == "ci_still_running" || - mr.DetailedMergeStatus == "ci_must_pass")) || + mr.DetailedMergeStatus == "ci_must_pass" || + mr.DetailedMergeStatus == "need_rebase")) || (!supportsDetailedMergeStatus && mr.MergeStatus == "can_be_merged")) && //nolint:staticcheck // Need to reference deprecated field for backwards compatibility mr.ApprovalsBeforeMerge <= 0 && mr.BlockingDiscussionsResolved && !mr.WorkInProgress && (allowSkippedPipeline || !isPipelineSkipped) { + + logger.Debug("Merge request is mergeable") return true, nil } + logger.Debug("Merge request is not mergeable") return false, nil } -func (g *GitlabClient) SupportsDetailedMergeStatus() (bool, error) { - v, err := g.GetVersion() +func (g *GitlabClient) SupportsDetailedMergeStatus(logger logging.SimpleLogging) (bool, error) { + logger.Debug("Checking if GitLab supports detailed merge status") + v, err := g.GetVersion(logger) if err != nil { return false, err } @@ -380,7 +394,8 @@ func (g *GitlabClient) SupportsDetailedMergeStatus() (bool, error) { } // UpdateStatus updates the build status of a commit. -func (g *GitlabClient) UpdateStatus(repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error { +func (g *GitlabClient) UpdateStatus(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error { + logger.Debug("Updating GitLab commit status for '%s' to '%s'", src, state) gitlabState := gitlab.Pending switch state { case models.PendingCommitStatus: @@ -391,60 +406,116 @@ func (g *GitlabClient) UpdateStatus(repo models.Repo, pull models.PullRequest, s gitlabState = gitlab.Success } - // refTarget is set to the head pipeline of the MR if it exists, or else it is set to the head branch - // of the MR. This is needed because the commit status is only shown in the MR if the pipeline is - // assigned to an MR reference. - // Try to get the MR details a couple of times in case the pipeline is not yet assigned to the MR - refTarget := pull.HeadBranch + // refTarget is only set to the head branch of the MR if HeadPipeline is not found + // when HeadPipeline is found we set the pipelineID for the request instead + var refTarget *string + var pipelineID *int retries := 1 delay := 2 * time.Second var mr *gitlab.MergeRequest var err error + // Try to get the MR details a couple of times in case the pipeline is not yet assigned to the MR for i := 0; i <= retries; i++ { - mr, err = g.GetMergeRequest(pull.BaseRepo.FullName, pull.Num) + mr, err = g.GetMergeRequest(logger, pull.BaseRepo.FullName, pull.Num) if err != nil { return err } if mr.HeadPipeline != nil { - g.logger.Debug("Head pipeline found for merge request %d, source '%s'. refTarget '%s'", + logger.Debug("Head pipeline found for merge request %d, source '%s'. refTarget '%s'", pull.Num, mr.HeadPipeline.Source, mr.HeadPipeline.Ref) - refTarget = mr.HeadPipeline.Ref + // set pipeline ID for the req once found + pipelineID = gitlab.Ptr(mr.HeadPipeline.ID) break } if i != retries { - g.logger.Debug("Head pipeline not found for merge request %d. Retrying in %s", + logger.Debug("Head pipeline not found for merge request %d. 
Retrying in %s", pull.Num, delay) time.Sleep(delay) } else { - g.logger.Debug("Head pipeline not found for merge request %d.", + // set the ref target here if the pipeline wasn't found + refTarget = gitlab.Ptr(pull.HeadBranch) + logger.Debug("Head pipeline not found for merge request %d.", pull.Num) } } - _, resp, err := g.Client.Commits.SetCommitStatus(repo.FullName, pull.HeadCommit, &gitlab.SetCommitStatusOptions{ - State: gitlabState, - Context: gitlab.Ptr(src), - Description: gitlab.Ptr(description), - TargetURL: &url, - Ref: gitlab.Ptr(refTarget), - }) - if resp != nil { - g.logger.Debug("POST /projects/%s/statuses/%s returned: %d", repo.FullName, pull.HeadCommit, resp.StatusCode) + var ( + resp *gitlab.Response + maxAttempts = 10 + retryer = &backoff.Backoff{ + Jitter: true, + Max: g.PollingInterval, + } + ) + + for i := 0; i < maxAttempts; i++ { + logger := logger.With( + "attempt", i+1, + "max_attempts", maxAttempts, + "repo", repo.FullName, + "commit", pull.HeadCommit, + "state", state.String(), + ) + + _, resp, err = g.Client.Commits.SetCommitStatus(repo.FullName, pull.HeadCommit, &gitlab.SetCommitStatusOptions{ + State: gitlabState, + Context: gitlab.Ptr(src), + Description: gitlab.Ptr(description), + TargetURL: &url, + // only one of these should get sent in the request + PipelineID: pipelineID, + Ref: refTarget, + }) + + if resp != nil { + logger.Debug("POST /projects/%s/statuses/%s returned: %d", repo.FullName, pull.HeadCommit, resp.StatusCode) + + // GitLab returns a `409 Conflict` status when the commit pipeline status is being changed/locked by another request, + // which is likely to happen if you use [`--parallel-pool-size > 1`] and [`parallel-plan|apply`]. + // + // The likelihood of this happening is increased when the number of parallel apply jobs is increased. + // + // Returning the [err] without retrying will permanently leave the GitLab commit status in a "running" state, + // which would prevent Atlantis from merging the merge request on [apply]. + // + // GitLab does not allow merge requests to be merged when the pipeline status is "running." + + if resp.StatusCode == http.StatusConflict { + sleep := retryer.ForAttempt(float64(i)) + + logger.With("retry_in", sleep).Warn("GitLab returned HTTP [409 Conflict] when updating commit status") + time.Sleep(sleep) + + continue + } + } + + // Log we got a 200 OK response from GitLab after at least one retry to help with debugging/understanding delays/errors. 
+ if err == nil && i > 0 { + logger.Info("GitLab returned HTTP [200 OK] after updating commit status") + } + + // Return the err, which might be nil if everything worked out + return err } - return err + + // If we got here, we've exhausted all attempts to update the commit status and still failed, so return the error upstream + return errors.Wrap(err, fmt.Sprintf("failed to update commit status for '%s' @ '%s' to '%s' after %d attempts", repo.FullName, pull.HeadCommit, src, maxAttempts)) } -func (g *GitlabClient) GetMergeRequest(repoFullName string, pullNum int) (*gitlab.MergeRequest, error) { +func (g *GitlabClient) GetMergeRequest(logger logging.SimpleLogging, repoFullName string, pullNum int) (*gitlab.MergeRequest, error) { + logger.Debug("Getting GitLab merge request %d", pullNum) mr, resp, err := g.Client.MergeRequests.GetMergeRequest(repoFullName, pullNum, nil) if resp != nil { - g.logger.Debug("GET /projects/%s/merge_requests/%d returned: %d", repoFullName, pullNum, resp.StatusCode) + logger.Debug("GET /projects/%s/merge_requests/%d returned: %d", repoFullName, pullNum, resp.StatusCode) } return mr, err } -func (g *GitlabClient) WaitForSuccessPipeline(ctx context.Context, pull models.PullRequest) { +func (g *GitlabClient) WaitForSuccessPipeline(logger logging.SimpleLogging, ctx context.Context, pull models.PullRequest) { + logger.Debug("Waiting for GitLab success pipeline for merge request %d", pull.Num) ctx, cancel := context.WithTimeout(ctx, 10*time.Second) defer cancel() @@ -453,10 +524,10 @@ func (g *GitlabClient) WaitForSuccessPipeline(ctx context.Context, pull models.P case <-ctx.Done(): // validation check time out cancel() - return //ctx.Err() + return // ctx.Err() default: - mr, _ := g.GetMergeRequest(pull.BaseRepo.FullName, pull.Num) + mr, _ := g.GetMergeRequest(logger, pull.BaseRepo.FullName, pull.Num) // check if pipeline has a success state to merge if mr.HeadPipeline.Status == "success" { return @@ -467,17 +538,18 @@ func (g *GitlabClient) WaitForSuccessPipeline(ctx context.Context, pull models.P } // MergePull merges the merge request. 
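The 409 handling above is the heart of this change. Isolated from the GitLab client, the retry shape looks like the sketch below, using the same `jpillora/backoff` package the diff imports; `maxAttempts`, the one-second ceiling, and `doRequest` are illustrative stand-ins, not Atlantis code:

```go
package main

import (
	"fmt"
	"net/http"
	"time"

	"github.com/jpillora/backoff"
)

// updateWithRetry retries only on HTTP 409, the status GitLab returns
// while another request holds the commit-status lock (e.g. parallel
// plan/apply runs updating the same commit).
func updateWithRetry(doRequest func() (int, error)) error {
	const maxAttempts = 10
	retryer := &backoff.Backoff{Jitter: true, Max: time.Second}

	for i := 0; i < maxAttempts; i++ {
		status, err := doRequest()
		if status != http.StatusConflict {
			return err // success, or a non-retryable failure
		}
		// A jittered, growing sleep spreads concurrent writers apart.
		time.Sleep(retryer.ForAttempt(float64(i)))
	}
	return fmt.Errorf("gave up after %d attempts: HTTP 409 Conflict", maxAttempts)
}

func main() {
	attempts := 0
	err := updateWithRetry(func() (int, error) {
		attempts++
		if attempts < 3 {
			return http.StatusConflict, nil // simulate two collisions
		}
		return http.StatusOK, nil
	})
	fmt.Printf("finished after %d attempts, err=%v\n", attempts, err)
}
```

Jitter matters here: without it, parallel runs that collided once tend to wake up together and collide again.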
-func (g *GitlabClient) MergePull(pull models.PullRequest, pullOptions models.PullRequestOptions) error { +func (g *GitlabClient) MergePull(logger logging.SimpleLogging, pull models.PullRequest, pullOptions models.PullRequestOptions) error { + logger.Debug("Merging GitLab merge request %d", pull.Num) commitMsg := common.AutomergeCommitMsg(pull.Num) - mr, err := g.GetMergeRequest(pull.BaseRepo.FullName, pull.Num) + mr, err := g.GetMergeRequest(logger, pull.BaseRepo.FullName, pull.Num) if err != nil { return errors.Wrap( err, "unable to merge merge request, it was not possible to retrieve the merge request") } project, resp, err := g.Client.Projects.GetProject(mr.ProjectID, nil) if resp != nil { - g.logger.Debug("GET /projects/%d returned: %d", mr.ProjectID, resp.StatusCode) + logger.Debug("GET /projects/%d returned: %d", mr.ProjectID, resp.StatusCode) } if err != nil { return errors.Wrap( @@ -485,7 +557,7 @@ func (g *GitlabClient) MergePull(pull models.PullRequest, pullOptions models.Pul } if project != nil && project.OnlyAllowMergeIfPipelineSucceeds { - g.WaitForSuccessPipeline(context.Background(), pull) + g.WaitForSuccessPipeline(logger, context.Background(), pull) } _, resp, err = g.Client.MergeRequests.AcceptMergeRequest( @@ -496,7 +568,7 @@ func (g *GitlabClient) MergePull(pull models.PullRequest, pullOptions models.Pul ShouldRemoveSourceBranch: &pullOptions.DeleteSourceBranchOnMerge, }) if resp != nil { - g.logger.Debug("PUT /projects/%s/merge_requests/%d/merge returned: %d", pull.BaseRepo.FullName, pull.Num, resp.StatusCode) + logger.Debug("PUT /projects/%s/merge_requests/%d/merge returned: %d", pull.BaseRepo.FullName, pull.Num, resp.StatusCode) } return errors.Wrap(err, "unable to merge merge request, it may not be in a mergeable state") } @@ -512,10 +584,11 @@ func (g *GitlabClient) DiscardReviews(_ models.Repo, _ models.PullRequest) error } // GetVersion returns the version of the Gitlab server this client is using. 
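`WaitForSuccessPipeline` above bounds its polling with a context deadline rather than an attempt count. The same shape, self-contained, with a fake condition standing in for the `GetMergeRequest` pipeline check (the 10-second deadline mirrors the diff; everything else is illustrative):

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// waitFor polls check once per interval until it reports true or the
// deadline expires; false means the caller decides whether that is fatal.
func waitFor(ctx context.Context, interval time.Duration, check func() bool) bool {
	ctx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()

	ticker := time.NewTicker(interval)
	defer ticker.Stop()
	for {
		if check() {
			return true
		}
		select {
		case <-ctx.Done():
			return false // timed out
		case <-ticker.C:
		}
	}
}

func main() {
	start := time.Now()
	ok := waitFor(context.Background(), 100*time.Millisecond, func() bool {
		// Stand-in for mr.HeadPipeline.Status == "success".
		return time.Since(start) > 300*time.Millisecond
	})
	fmt.Println("pipeline success:", ok)
}
```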
-func (g *GitlabClient) GetVersion() (*version.Version, error) { +func (g *GitlabClient) GetVersion(logger logging.SimpleLogging) (*version.Version, error) { + logger.Debug("Getting GitLab version") versionResp, resp, err := g.Client.Version.GetVersion() if resp != nil { - g.logger.Debug("GET /version returned: %d", resp.StatusCode) + logger.Debug("GET /version returned: %d", resp.StatusCode) } if err != nil { return nil, err @@ -560,12 +633,13 @@ func (g *GitlabClient) GetTeamNamesForUser(_ models.Repo, _ models.User) ([]stri // GetFileContent a repository file content from VCS (which support fetch a single file from repository) // The first return value indicates whether the repo contains a file or not // if BaseRepo had a file, its content will placed on the second return value -func (g *GitlabClient) GetFileContent(pull models.PullRequest, fileName string) (bool, []byte, error) { +func (g *GitlabClient) GetFileContent(logger logging.SimpleLogging, pull models.PullRequest, fileName string) (bool, []byte, error) { + logger.Debug("Getting GitLab file content for file '%s'", fileName) opt := gitlab.GetRawFileOptions{Ref: gitlab.Ptr(pull.HeadBranch)} bytes, resp, err := g.Client.RepositoryFiles.GetRawFile(pull.BaseRepo.FullName, fileName, &opt) if resp != nil { - g.logger.Debug("GET /projects/%s/repository/files/%s/raw returned: %d", pull.BaseRepo.FullName, fileName, resp.StatusCode) + logger.Debug("GET /projects/%s/repository/files/%s/raw returned: %d", pull.BaseRepo.FullName, fileName, resp.StatusCode) } if resp != nil && resp.StatusCode == http.StatusNotFound { return false, []byte{}, nil @@ -582,10 +656,11 @@ func (g *GitlabClient) SupportsSingleFileDownload(_ models.Repo) bool { return true } -func (g *GitlabClient) GetCloneURL(_ models.VCSHostType, repo string) (string, error) { +func (g *GitlabClient) GetCloneURL(logger logging.SimpleLogging, _ models.VCSHostType, repo string) (string, error) { + logger.Debug("Getting GitLab clone URL for repo '%s'", repo) project, resp, err := g.Client.Projects.GetProject(repo, nil) if resp != nil { - g.logger.Debug("GET /projects/%s returned: %d", repo, resp.StatusCode) + logger.Debug("GET /projects/%s returned: %d", repo, resp.StatusCode) } if err != nil { return "", err @@ -593,10 +668,11 @@ func (g *GitlabClient) GetCloneURL(_ models.VCSHostType, repo string) (string, e return project.HTTPURLToRepo, nil } -func (g *GitlabClient) GetPullLabels(repo models.Repo, pull models.PullRequest) ([]string, error) { +func (g *GitlabClient) GetPullLabels(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) { + logger.Debug("Getting GitLab labels for merge request %d", pull.Num) mr, resp, err := g.Client.MergeRequests.GetMergeRequest(repo.FullName, pull.Num, nil) if resp != nil { - g.logger.Debug("GET /projects/%s/merge_requests/%d returned: %d", repo.FullName, pull.Num, resp.StatusCode) + logger.Debug("GET /projects/%s/merge_requests/%d returned: %d", repo.FullName, pull.Num, resp.StatusCode) } if err != nil { diff --git a/server/events/vcs/gitlab_client_test.go b/server/events/vcs/gitlab_client_test.go index 99cf6b426e..98de58287f 100644 --- a/server/events/vcs/gitlab_client_test.go +++ b/server/events/vcs/gitlab_client_test.go @@ -12,17 +12,19 @@ import ( "testing" "time" - version "github.com/hashicorp/go-version" + "github.com/hashicorp/go-version" "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/logging" - gitlab 
"github.com/xanzy/go-gitlab" + "github.com/xanzy/go-gitlab" . "github.com/runatlantis/atlantis/testing" ) var projectID = 4580910 +const gitlabPipelineSuccessMrID = 488598 + // Test that the base url gets set properly. func TestNewGitlabClient_BaseURL(t *testing.T) { gitlabClientUnderTest = true @@ -114,6 +116,7 @@ func TestGitlabClient_SupportsCommonMark(t *testing.T) { } func TestGitlabClient_GetModifiedFiles(t *testing.T) { + logger := logging.NewNoopLogger(t) cases := []struct { attempts int }{ @@ -155,10 +158,10 @@ func TestGitlabClient_GetModifiedFiles(t *testing.T) { Version: nil, PollingInterval: time.Second * 0, PollingTimeout: time.Second * 10, - logger: logging.NewNoopLogger(t), } filenames, err := client.GetModifiedFiles( + logger, models.Repo{ FullName: "lkysow/atlantis-example", Owner: "lkysow", @@ -176,10 +179,10 @@ func TestGitlabClient_GetModifiedFiles(t *testing.T) { Equals(t, []string{"somefile.yaml"}, filenames) }) } - } func TestGitlabClient_MergePull(t *testing.T) { + logger := logging.NewNoopLogger(t) mergeSuccess, err := os.ReadFile("testdata/github-pull-request.json") Ok(t, err) @@ -244,19 +247,20 @@ func TestGitlabClient_MergePull(t *testing.T) { client := &GitlabClient{ Client: internalClient, Version: nil, - logger: logging.NewNoopLogger(t), } - err = client.MergePull(models.PullRequest{ - Num: 1, - BaseRepo: models.Repo{ - FullName: "runatlantis/atlantis", - Owner: "runatlantis", - Name: "atlantis", - }, - }, models.PullRequestOptions{ - DeleteSourceBranchOnMerge: false, - }) + err = client.MergePull( + logger, + models.PullRequest{ + Num: 1, + BaseRepo: models.Repo{ + FullName: "runatlantis/atlantis", + Owner: "runatlantis", + Name: "atlantis", + }, + }, models.PullRequestOptions{ + DeleteSourceBranchOnMerge: false, + }) if c.expErr == "" { Ok(t, err) } else { @@ -268,6 +272,7 @@ func TestGitlabClient_MergePull(t *testing.T) { } func TestGitlabClient_UpdateStatus(t *testing.T) { + logger := logging.NewNoopLogger(t) pipelineSuccess, err := os.ReadFile("testdata/gitlab-pipeline-success.json") Ok(t, err) @@ -299,7 +304,7 @@ func TestGitlabClient_UpdateStatus(t *testing.T) { body, err := io.ReadAll(r.Body) Ok(t, err) - exp := fmt.Sprintf(`{"state":"%s","ref":"patch-1-merger","context":"src","target_url":"https://google.com","description":"description"}`, c.expState) + exp := fmt.Sprintf(`{"state":"%s","context":"src","target_url":"https://google.com","description":"description","pipeline_id":%d}`, c.expState, gitlabPipelineSuccessMrID) Equals(t, exp, string(body)) defer r.Body.Close() // nolint: errcheck w.Write([]byte("{}")) // nolint: errcheck @@ -320,7 +325,6 @@ func TestGitlabClient_UpdateStatus(t *testing.T) { client := &GitlabClient{ Client: internalClient, Version: nil, - logger: logging.NewNoopLogger(t), } repo := models.Repo{ @@ -328,19 +332,130 @@ func TestGitlabClient_UpdateStatus(t *testing.T) { Owner: "runatlantis", Name: "atlantis", } - err = client.UpdateStatus(repo, models.PullRequest{ - Num: 1, - BaseRepo: repo, - HeadCommit: "sha", - HeadBranch: "test", - }, c.status, "src", "description", "https://google.com") + err = client.UpdateStatus( + logger, + repo, + models.PullRequest{ + Num: 1, + BaseRepo: repo, + HeadCommit: "sha", + HeadBranch: "test", + }, c.status, "src", "description", "https://google.com") Ok(t, err) Assert(t, gotRequest, "expected to get the request") }) } } +func TestGitlabClient_UpdateStatusRetryable(t *testing.T) { + logger := logging.NewNoopLogger(t) + pipelineSuccess, err := 
os.ReadFile("testdata/gitlab-pipeline-success.json") + Ok(t, err) + + cases := []struct { + status models.CommitStatus + numberOfConflicts int + expNumberOfRequests int + expState string + expError bool + }{ + // Ensure that 0 x 409 Conflict succeeds + { + status: models.PendingCommitStatus, + numberOfConflicts: 0, + expNumberOfRequests: 1, + expState: "running", + }, + // Ensure that 5 x 409 Conflict still succeeds + { + status: models.PendingCommitStatus, + numberOfConflicts: 5, + expNumberOfRequests: 6, + expState: "running", + }, + // Ensure that 10 x 409 Conflict still fail due to running out of retries + { + status: models.FailedCommitStatus, + numberOfConflicts: 100, // anything larger than 10 is fine + expNumberOfRequests: 10, + expState: "failed", + expError: true, + }, + } + for _, c := range cases { + t.Run(c.expState, func(t *testing.T) { + handledNumberOfRequests := 0 + + testServer := httptest.NewServer( + http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.RequestURI { + case "/api/v4/projects/runatlantis%2Fatlantis/statuses/sha": + handledNumberOfRequests++ + shouldSendConflict := handledNumberOfRequests <= c.numberOfConflicts + + body, err := io.ReadAll(r.Body) + Ok(t, err) + exp := fmt.Sprintf(`{"state":"%s","context":"src","target_url":"https://google.com","description":"description","pipeline_id":%d}`, c.expState, gitlabPipelineSuccessMrID) + Equals(t, exp, string(body)) + defer r.Body.Close() // nolint: errcheck + + if shouldSendConflict { + w.WriteHeader(http.StatusConflict) + } + + w.Write([]byte("{}")) // nolint: errcheck + + case "/api/v4/projects/runatlantis%2Fatlantis/merge_requests/1": + w.WriteHeader(http.StatusOK) + w.Write(pipelineSuccess) // nolint: errcheck + + case "/api/v4/": + // Rate limiter requests. 
+ w.WriteHeader(http.StatusOK) + + default: + t.Errorf("got unexpected request at %q", r.RequestURI) + http.Error(w, "not found", http.StatusNotFound) + } + })) + + internalClient, err := gitlab.NewClient("token", gitlab.WithBaseURL(testServer.URL)) + Ok(t, err) + client := &GitlabClient{ + Client: internalClient, + Version: nil, + PollingInterval: 10 * time.Millisecond, + } + + repo := models.Repo{ + FullName: "runatlantis/atlantis", + Owner: "runatlantis", + Name: "atlantis", + } + err = client.UpdateStatus( + logger, + repo, + models.PullRequest{ + Num: 1, + BaseRepo: repo, + HeadCommit: "sha", + HeadBranch: "test", + }, c.status, "src", "description", "https://google.com") + + if c.expError { + ErrContains(t, "failed to update commit status for 'runatlantis/atlantis' @ 'sha' to 'src' after 10 attempts", err) + ErrContains(t, "409", err) + } else { + Ok(t, err) + } + + Assert(t, c.expNumberOfRequests == handledNumberOfRequests, fmt.Sprintf("expected %d number of requests, but processed %d", c.expNumberOfRequests, handledNumberOfRequests)) + }) + } +} + func TestGitlabClient_PullIsMergeable(t *testing.T) { + logger := logging.NewNoopLogger(t) gitlabClientUnderTest = true gitlabVersionOver15_6 := "15.8.3-ee" gitlabVersion15_6 := "15.6.0-ee" @@ -351,6 +466,7 @@ func TestGitlabClient_PullIsMergeable(t *testing.T) { noHeadPipelineMR := 2 ciMustPassSuccessMR := 3 ciMustPassFailureMR := 4 + needRebaseMR := 5 pipelineSuccess, err := os.ReadFile("testdata/gitlab-pipeline-success.json") Ok(t, err) @@ -361,6 +477,9 @@ func TestGitlabClient_PullIsMergeable(t *testing.T) { detailedMergeStatusCiMustPass, err := os.ReadFile("testdata/gitlab-detailed-merge-status-ci-must-pass.json") Ok(t, err) + detailedMergeStatusNeedRebase, err := os.ReadFile("testdata/gitlab-detailed-merge-status-need-rebase.json") + Ok(t, err) + headPipelineNotAvailable, err := os.ReadFile("testdata/gitlab-head-pipeline-not-available.json") Ok(t, err) @@ -420,6 +539,13 @@ func TestGitlabClient_PullIsMergeable(t *testing.T) { ciMustPassFailureMR, false, }, + { + fmt.Sprintf("%s/apply", vcsStatusName), + models.FailedCommitStatus, + gitlabServerVersions, + needRebaseMR, + true, + }, { fmt.Sprintf("%s/apply: resource/default", vcsStatusName), models.FailedCommitStatus, @@ -484,6 +610,9 @@ func TestGitlabClient_PullIsMergeable(t *testing.T) { case fmt.Sprintf("/api/v4/projects/runatlantis%%2Fatlantis/merge_requests/%v", ciMustPassFailureMR): w.WriteHeader(http.StatusOK) w.Write(detailedMergeStatusCiMustPass) // nolint: errcheck + case fmt.Sprintf("/api/v4/projects/runatlantis%%2Fatlantis/merge_requests/%v", needRebaseMR): + w.WriteHeader(http.StatusOK) + w.Write(detailedMergeStatusNeedRebase) // nolint: errcheck case fmt.Sprintf("/api/v4/projects/%v", projectID): w.WriteHeader(http.StatusOK) w.Write(projectSuccess) // nolint: errcheck @@ -511,7 +640,6 @@ func TestGitlabClient_PullIsMergeable(t *testing.T) { client := &GitlabClient{ Client: internalClient, Version: nil, - logger: logging.NewNoopLogger(t), } repo := models.Repo{ @@ -524,11 +652,14 @@ func TestGitlabClient_PullIsMergeable(t *testing.T) { }, } - mergeable, err := client.PullIsMergeable(repo, models.PullRequest{ - Num: c.mrID, - BaseRepo: repo, - HeadCommit: "67cb91d3f6198189f433c045154a885784ba6977", - }, vcsStatusName) + mergeable, err := client.PullIsMergeable( + logger, + repo, + models.PullRequest{ + Num: c.mrID, + BaseRepo: repo, + HeadCommit: "67cb91d3f6198189f433c045154a885784ba6977", + }, vcsStatusName, []string{}) Ok(t, err) Equals(t, c.expState, mergeable) @@ 
-538,9 +669,10 @@ func TestGitlabClient_PullIsMergeable(t *testing.T) { } func TestGitlabClient_MarkdownPullLink(t *testing.T) { + logger := logging.NewNoopLogger(t) gitlabClientUnderTest = true defer func() { gitlabClientUnderTest = false }() - client, err := NewGitlabClient("gitlab.com", "token", nil) + client, err := NewGitlabClient("gitlab.com", "token", logger) Ok(t, err) pull := models.PullRequest{Num: 1} s, _ := client.MarkdownPullLink(pull) @@ -549,6 +681,7 @@ func TestGitlabClient_MarkdownPullLink(t *testing.T) { } func TestGitlabClient_HideOldComments(t *testing.T) { + logger := logging.NewNoopLogger(t) type notePutCallDetails struct { noteID string comment []string @@ -673,10 +806,9 @@ func TestGitlabClient_HideOldComments(t *testing.T) { client := &GitlabClient{ Client: internalClient, Version: nil, - logger: logging.NewNoopLogger(t), } - err = client.HidePrevCommandComments(repo, pullNum, command.Plan.TitleString(), c.dir) + err = client.HidePrevCommandComments(logger, repo, pullNum, command.Plan.TitleString(), c.dir) Ok(t, err) // Check the correct number of plan comments have been processed @@ -693,6 +825,7 @@ func TestGitlabClient_HideOldComments(t *testing.T) { } func TestGithubClient_GetPullLabels(t *testing.T) { + logger := logging.NewNoopLogger(t) mergeSuccessWithLabel, err := os.ReadFile("testdata/gitlab-merge-success-with-label.json") Ok(t, err) @@ -713,19 +846,23 @@ func TestGithubClient_GetPullLabels(t *testing.T) { client := &GitlabClient{ Client: internalClient, Version: nil, - logger: logging.NewNoopLogger(t), } - labels, err := client.GetPullLabels(models.Repo{ - FullName: "runatlantis/atlantis", - }, models.PullRequest{ - Num: 1, - }) + labels, err := client.GetPullLabels( + logger, + models.Repo{ + FullName: "runatlantis/atlantis", + }, + models.PullRequest{ + Num: 1, + }, + ) Ok(t, err) Equals(t, []string{"work in progress"}, labels) } func TestGithubClient_GetPullLabels_EmptyResponse(t *testing.T) { + logger := logging.NewNoopLogger(t) pipelineSuccess, err := os.ReadFile("testdata/gitlab-pipeline-success.json") Ok(t, err) @@ -746,14 +883,15 @@ func TestGithubClient_GetPullLabels_EmptyResponse(t *testing.T) { client := &GitlabClient{ Client: internalClient, Version: nil, - logger: logging.NewNoopLogger(t), } - labels, err := client.GetPullLabels(models.Repo{ - FullName: "runatlantis/atlantis", - }, models.PullRequest{ - Num: 1, - }) + labels, err := client.GetPullLabels( + logger, + models.Repo{ + FullName: "runatlantis/atlantis", + }, models.PullRequest{ + Num: 1, + }) Ok(t, err) Equals(t, 0, len(labels)) } diff --git a/server/events/vcs/instrumented_client.go b/server/events/vcs/instrumented_client.go index 83dc0b2873..32022438f1 100644 --- a/server/events/vcs/instrumented_client.go +++ b/server/events/vcs/instrumented_client.go @@ -1,10 +1,9 @@ package vcs import ( - "fmt" "strconv" - "github.com/google/go-github/v58/github" + "github.com/google/go-github/v65/github" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/logging" "github.com/runatlantis/atlantis/server/metrics" @@ -32,7 +31,7 @@ func NewInstrumentedGithubClient(client *GithubClient, statsScope tally.Scope, l //go:generate pegomock generate --package mocks -o mocks/mock_github_pull_request_getter.go GithubPullRequestGetter type GithubPullRequestGetter interface { - GetPullRequest(repo models.Repo, pullNum int) (*github.PullRequest, error) + GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) (*github.PullRequest, error) } // 
IGithubClient exists to bridge the gap between GithubPullRequestGetter and Client interface to allow @@ -51,13 +50,9 @@ type InstrumentedGithubClient struct { Logger logging.SimpleLogging } -func (c *InstrumentedGithubClient) GetPullRequest(repo models.Repo, pullNum int) (*github.PullRequest, error) { +func (c *InstrumentedGithubClient) GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) (*github.PullRequest, error) { scope := c.StatsScope.SubScope("get_pull_request") scope = SetGitScopeTags(scope, repo.FullName, pullNum) - logger := c.Logger.WithHistory([]interface{}{ - "repository", fmt.Sprintf("%s/%s", repo.Owner, repo.Name), - "pull-num", strconv.Itoa(pullNum), - }...) executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() defer executionTime.Stop() @@ -65,7 +60,7 @@ func (c *InstrumentedGithubClient) GetPullRequest(repo models.Repo, pullNum int) executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) executionError := scope.Counter(metrics.ExecutionErrorMetric) - pull, err := c.PullRequestGetter.GetPullRequest(repo, pullNum) + pull, err := c.PullRequestGetter.GetPullRequest(logger, repo, pullNum) if err != nil { executionError.Inc(1) @@ -84,10 +79,9 @@ type InstrumentedClient struct { Logger logging.SimpleLogging } -func (c *InstrumentedClient) GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) { +func (c *InstrumentedClient) GetModifiedFiles(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) { scope := c.StatsScope.SubScope("get_modified_files") scope = SetGitScopeTags(scope, repo.FullName, pull.Num) - logger := c.Logger.WithHistory(fmtLogSrc(repo, pull.Num)...) executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() defer executionTime.Stop() @@ -95,7 +89,7 @@ func (c *InstrumentedClient) GetModifiedFiles(repo models.Repo, pull models.Pull executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) executionError := scope.Counter(metrics.ExecutionErrorMetric) - files, err := c.Client.GetModifiedFiles(repo, pull) + files, err := c.Client.GetModifiedFiles(logger, repo, pull) if err != nil { executionError.Inc(1) @@ -107,10 +101,9 @@ func (c *InstrumentedClient) GetModifiedFiles(repo models.Repo, pull models.Pull return files, err } -func (c *InstrumentedClient) CreateComment(repo models.Repo, pullNum int, comment string, command string) error { +func (c *InstrumentedClient) CreateComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, comment string, command string) error { scope := c.StatsScope.SubScope("create_comment") scope = SetGitScopeTags(scope, repo.FullName, pullNum) - logger := c.Logger.WithHistory(fmtLogSrc(repo, pullNum)...) 
executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() defer executionTime.Stop() @@ -118,7 +111,7 @@ func (c *InstrumentedClient) CreateComment(repo models.Repo, pullNum int, commen executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) executionError := scope.Counter(metrics.ExecutionErrorMetric) - if err := c.Client.CreateComment(repo, pullNum, comment, command); err != nil { + if err := c.Client.CreateComment(logger, repo, pullNum, comment, command); err != nil { executionError.Inc(1) logger.Err("Unable to create comment for command %s, error: %s", command, err.Error()) return err @@ -128,7 +121,7 @@ func (c *InstrumentedClient) CreateComment(repo models.Repo, pullNum int, commen return nil } -func (c *InstrumentedClient) ReactToComment(repo models.Repo, pullNum int, commentID int64, reaction string) error { +func (c *InstrumentedClient) ReactToComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, commentID int64, reaction string) error { scope := c.StatsScope.SubScope("react_to_comment") executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() @@ -137,9 +130,9 @@ func (c *InstrumentedClient) ReactToComment(repo models.Repo, pullNum int, comme executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) executionError := scope.Counter(metrics.ExecutionErrorMetric) - if err := c.Client.ReactToComment(repo, pullNum, commentID, reaction); err != nil { + if err := c.Client.ReactToComment(logger, repo, pullNum, commentID, reaction); err != nil { executionError.Inc(1) - c.Logger.Err("Unable to react to comment, error: %s", err.Error()) + logger.Err("Unable to react to comment, error: %s", err.Error()) return err } @@ -147,10 +140,9 @@ func (c *InstrumentedClient) ReactToComment(repo models.Repo, pullNum int, comme return nil } -func (c *InstrumentedClient) HidePrevCommandComments(repo models.Repo, pullNum int, command string, dir string) error { +func (c *InstrumentedClient) HidePrevCommandComments(logger logging.SimpleLogging, repo models.Repo, pullNum int, command string, dir string) error { scope := c.StatsScope.SubScope("hide_prev_plan_comments") scope = SetGitScopeTags(scope, repo.FullName, pullNum) - logger := c.Logger.WithHistory(fmtLogSrc(repo, pullNum)...) executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() defer executionTime.Stop() @@ -158,7 +150,7 @@ func (c *InstrumentedClient) HidePrevCommandComments(repo models.Repo, pullNum i executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) executionError := scope.Counter(metrics.ExecutionErrorMetric) - if err := c.Client.HidePrevCommandComments(repo, pullNum, command, dir); err != nil { + if err := c.Client.HidePrevCommandComments(logger, repo, pullNum, command, dir); err != nil { executionError.Inc(1) logger.Err("Unable to hide previous %s comments, error: %s", command, err.Error()) return err @@ -169,10 +161,9 @@ func (c *InstrumentedClient) HidePrevCommandComments(repo models.Repo, pullNum i } -func (c *InstrumentedClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { +func (c *InstrumentedClient) PullIsApproved(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { scope := c.StatsScope.SubScope("pull_is_approved") scope = SetGitScopeTags(scope, repo.FullName, pull.Num) - logger := c.Logger.WithHistory(fmtLogSrc(repo, pull.Num)...) 
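Editor's note: besides the logger parameter, PullIsMergeable (in the hunk just below) also gains ignoreVCSStatusNames []string. Because vcs.Client is an interface, both changes must land in every implementation at once: this instrumented client, the client proxy, the generated mocks, and NotConfiguredVCSClient, all edited later in this diff. A paraphrase of the two most-changed signatures, as implied by the call sites here (clientSubset is an illustrative name; the full interface lives in server/events/vcs/client.go):

    package vcs

    import (
        "github.com/runatlantis/atlantis/server/events/models"
        "github.com/runatlantis/atlantis/server/logging"
    )

    // clientSubset is an excerpt for illustration, not the real interface.
    type clientSubset interface {
        PullIsApproved(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error)
        PullIsMergeable(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, vcsstatusname string, ignoreVCSStatusNames []string) (bool, error)
    }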
executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() defer executionTime.Stop() @@ -180,7 +171,7 @@ func (c *InstrumentedClient) PullIsApproved(repo models.Repo, pull models.PullRe executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) executionError := scope.Counter(metrics.ExecutionErrorMetric) - approved, err := c.Client.PullIsApproved(repo, pull) + approved, err := c.Client.PullIsApproved(logger, repo, pull) if err != nil { executionError.Inc(1) @@ -192,10 +183,9 @@ func (c *InstrumentedClient) PullIsApproved(repo models.Repo, pull models.PullRe return approved, err } -func (c *InstrumentedClient) PullIsMergeable(repo models.Repo, pull models.PullRequest, vcsstatusname string) (bool, error) { +func (c *InstrumentedClient) PullIsMergeable(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, vcsstatusname string, ignoreVCSStatusNames []string) (bool, error) { scope := c.StatsScope.SubScope("pull_is_mergeable") scope = SetGitScopeTags(scope, repo.FullName, pull.Num) - logger := c.Logger.WithHistory(fmtLogSrc(repo, pull.Num)...) executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() defer executionTime.Stop() @@ -203,7 +193,7 @@ func (c *InstrumentedClient) PullIsMergeable(repo models.Repo, pull models.PullR executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) executionError := scope.Counter(metrics.ExecutionErrorMetric) - mergeable, err := c.Client.PullIsMergeable(repo, pull, vcsstatusname) + mergeable, err := c.Client.PullIsMergeable(logger, repo, pull, vcsstatusname, ignoreVCSStatusNames) if err != nil { executionError.Inc(1) @@ -215,17 +205,9 @@ func (c *InstrumentedClient) PullIsMergeable(repo models.Repo, pull models.PullR return mergeable, err } -func (c *InstrumentedClient) UpdateStatus(repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error { +func (c *InstrumentedClient) UpdateStatus(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error { scope := c.StatsScope.SubScope("update_status") scope = SetGitScopeTags(scope, repo.FullName, pull.Num) - logger := c.Logger.WithHistory([]interface{}{ - "repository", fmt.Sprintf("%s/%s", repo.Owner, repo.Name), - "pull-num", strconv.Itoa(pull.Num), - "src", src, - "description", description, - "state", state, - "url", url, - }...) 
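Editor's note: the block removed just above is one of several per-call WithHistory loggers deleted in this file (MergePull's follows below, and the fmtLogSrc helper they shared is deleted at the end of this file's diff). The repo and PR fields they attached now travel with the single logger the caller threads through, roughly like this (requestLogger is a hypothetical helper; WithHistory's variadic key/value form matches the deleted code):

    package vcs

    import (
        "strconv"

        "github.com/runatlantis/atlantis/server/events/models"
        "github.com/runatlantis/atlantis/server/logging"
    )

    // requestLogger attaches repo/PR context once, at the top of request
    // handling, instead of once per instrumented VCS call.
    func requestLogger(base logging.SimpleLogging, repo models.Repo, pullNum int) logging.SimpleLogging {
        return base.WithHistory(
            "repository", repo.FullName,
            "pull-num", strconv.Itoa(pullNum),
        )
    }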
executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() defer executionTime.Stop() @@ -234,7 +216,7 @@ func (c *InstrumentedClient) UpdateStatus(repo models.Repo, pull models.PullRequ executionError := scope.Counter(metrics.ExecutionErrorMetric) logger.Info("updating vcs status") - if err := c.Client.UpdateStatus(repo, pull, state, src, description, url); err != nil { + if err := c.Client.UpdateStatus(logger, repo, pull, state, src, description, url); err != nil { executionError.Inc(1) logger.Err("Unable to update status at url: %s, error: %s", url, err.Error()) return err @@ -244,10 +226,9 @@ func (c *InstrumentedClient) UpdateStatus(repo models.Repo, pull models.PullRequ return nil } -func (c *InstrumentedClient) MergePull(pull models.PullRequest, pullOptions models.PullRequestOptions) error { +func (c *InstrumentedClient) MergePull(logger logging.SimpleLogging, pull models.PullRequest, pullOptions models.PullRequestOptions) error { scope := c.StatsScope.SubScope("merge_pull") scope = SetGitScopeTags(scope, pull.BaseRepo.FullName, pull.Num) - logger := c.Logger.WithHistory("pull-num", pull.Num) executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() defer executionTime.Stop() @@ -255,7 +236,7 @@ func (c *InstrumentedClient) MergePull(pull models.PullRequest, pullOptions mode executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) executionError := scope.Counter(metrics.ExecutionErrorMetric) - if err := c.Client.MergePull(pull, pullOptions); err != nil { + if err := c.Client.MergePull(logger, pull, pullOptions); err != nil { executionError.Inc(1) logger.Err("Unable to merge pull, error: %s", err.Error()) return err @@ -265,14 +246,6 @@ func (c *InstrumentedClient) MergePull(pull models.PullRequest, pullOptions mode return nil } -// taken from other parts of the code, would be great to have this in a shared spot -func fmtLogSrc(repo models.Repo, pullNum int) []interface{} { - return []interface{}{ - "repository", repo.FullName, - "pull-num", strconv.Itoa(pullNum), - } -} - func SetGitScopeTags(scope tally.Scope, repoFullName string, pullNum int) tally.Scope { return scope.Tagged(map[string]string{ "base_repo": repoFullName, diff --git a/server/events/vcs/mocks/mock_client.go b/server/events/vcs/mocks/mock_client.go index 5b0bb3051a..f51036b87a 100644 --- a/server/events/vcs/mocks/mock_client.go +++ b/server/events/vcs/mocks/mock_client.go @@ -6,6 +6,7 @@ package mocks import ( pegomock "github.com/petergtz/pegomock/v4" models "github.com/runatlantis/atlantis/server/events/models" + logging "github.com/runatlantis/atlantis/server/logging" "reflect" "time" ) @@ -25,11 +26,11 @@ func NewMockClient(options ...pegomock.Option) *MockClient { func (mock *MockClient) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockClient) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockClient) CreateComment(repo models.Repo, pullNum int, comment string, command string) error { +func (mock *MockClient) CreateComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, comment string, command string) error { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockClient().") } - params := []pegomock.Param{repo, pullNum, comment, command} + params := []pegomock.Param{logger, repo, pullNum, comment, command} result := pegomock.GetGenericMockFrom(mock).Invoke("CreateComment", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) var ret0 error if len(result) != 0 { @@ -55,11 +56,11 @@ func (mock *MockClient) DiscardReviews(repo models.Repo, pull models.PullRequest return ret0 } -func (mock *MockClient) GetCloneURL(VCSHostType models.VCSHostType, repo string) (string, error) { +func (mock *MockClient) GetCloneURL(logger logging.SimpleLogging, VCSHostType models.VCSHostType, repo string) (string, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{VCSHostType, repo} + params := []pegomock.Param{logger, VCSHostType, repo} result := pegomock.GetGenericMockFrom(mock).Invoke("GetCloneURL", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string var ret1 error @@ -74,11 +75,11 @@ func (mock *MockClient) GetCloneURL(VCSHostType models.VCSHostType, repo string) return ret0, ret1 } -func (mock *MockClient) GetFileContent(pull models.PullRequest, fileName string) (bool, []byte, error) { +func (mock *MockClient) GetFileContent(logger logging.SimpleLogging, pull models.PullRequest, fileName string) (bool, []byte, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{pull, fileName} + params := []pegomock.Param{logger, pull, fileName} result := pegomock.GetGenericMockFrom(mock).Invoke("GetFileContent", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*[]byte)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 bool var ret1 []byte @@ -97,11 +98,11 @@ func (mock *MockClient) GetFileContent(pull models.PullRequest, fileName string) return ret0, ret1, ret2 } -func (mock *MockClient) GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) { +func (mock *MockClient) GetModifiedFiles(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{repo, pull} + params := []pegomock.Param{logger, repo, pull} result := pegomock.GetGenericMockFrom(mock).Invoke("GetModifiedFiles", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 []string var ret1 error @@ -116,11 +117,11 @@ func (mock *MockClient) GetModifiedFiles(repo models.Repo, pull models.PullReque return ret0, ret1 } -func (mock *MockClient) GetPullLabels(repo models.Repo, pull models.PullRequest) ([]string, error) { +func (mock *MockClient) GetPullLabels(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockClient().") } - params := []pegomock.Param{repo, pull} + params := []pegomock.Param{logger, repo, pull} result := pegomock.GetGenericMockFrom(mock).Invoke("GetPullLabels", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 []string var ret1 error @@ -154,11 +155,11 @@ func (mock *MockClient) GetTeamNamesForUser(repo models.Repo, user models.User) return ret0, ret1 } -func (mock *MockClient) HidePrevCommandComments(repo models.Repo, pullNum int, command string, dir string) error { +func (mock *MockClient) HidePrevCommandComments(logger logging.SimpleLogging, repo models.Repo, pullNum int, command string, dir string) error { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{repo, pullNum, command, dir} + params := []pegomock.Param{logger, repo, pullNum, command, dir} result := pegomock.GetGenericMockFrom(mock).Invoke("HidePrevCommandComments", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) var ret0 error if len(result) != 0 { @@ -188,11 +189,11 @@ func (mock *MockClient) MarkdownPullLink(pull models.PullRequest) (string, error return ret0, ret1 } -func (mock *MockClient) MergePull(pull models.PullRequest, pullOptions models.PullRequestOptions) error { +func (mock *MockClient) MergePull(logger logging.SimpleLogging, pull models.PullRequest, pullOptions models.PullRequestOptions) error { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{pull, pullOptions} + params := []pegomock.Param{logger, pull, pullOptions} result := pegomock.GetGenericMockFrom(mock).Invoke("MergePull", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) var ret0 error if len(result) != 0 { @@ -203,11 +204,11 @@ func (mock *MockClient) MergePull(pull models.PullRequest, pullOptions models.Pu return ret0 } -func (mock *MockClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { +func (mock *MockClient) PullIsApproved(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{repo, pull} + params := []pegomock.Param{logger, repo, pull} result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsApproved", params, []reflect.Type{reflect.TypeOf((*models.ApprovalStatus)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 models.ApprovalStatus var ret1 error @@ -222,11 +223,11 @@ func (mock *MockClient) PullIsApproved(repo models.Repo, pull models.PullRequest return ret0, ret1 } -func (mock *MockClient) PullIsMergeable(repo models.Repo, pull models.PullRequest, vcsstatusname string) (bool, error) { +func (mock *MockClient) PullIsMergeable(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, vcsstatusname string, ignoreVCSStatusNames []string) (bool, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockClient().") } - params := []pegomock.Param{repo, pull, vcsstatusname} + params := []pegomock.Param{logger, repo, pull, vcsstatusname, ignoreVCSStatusNames} result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsMergeable", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 bool var ret1 error @@ -241,11 +242,11 @@ func (mock *MockClient) PullIsMergeable(repo models.Repo, pull models.PullReques return ret0, ret1 } -func (mock *MockClient) ReactToComment(repo models.Repo, pullNum int, commentID int64, reaction string) error { +func (mock *MockClient) ReactToComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, commentID int64, reaction string) error { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{repo, pullNum, commentID, reaction} + params := []pegomock.Param{logger, repo, pullNum, commentID, reaction} result := pegomock.GetGenericMockFrom(mock).Invoke("ReactToComment", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) var ret0 error if len(result) != 0 { @@ -271,11 +272,11 @@ func (mock *MockClient) SupportsSingleFileDownload(repo models.Repo) bool { return ret0 } -func (mock *MockClient) UpdateStatus(repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error { +func (mock *MockClient) UpdateStatus(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") } - params := []pegomock.Param{repo, pull, state, src, description, url} + params := []pegomock.Param{logger, repo, pull, state, src, description, url} result := pegomock.GetGenericMockFrom(mock).Invoke("UpdateStatus", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) var ret0 error if len(result) != 0 { @@ -323,8 +324,8 @@ type VerifierMockClient struct { timeout time.Duration } -func (verifier *VerifierMockClient) CreateComment(repo models.Repo, pullNum int, comment string, command string) *MockClient_CreateComment_OngoingVerification { - params := []pegomock.Param{repo, pullNum, comment, command} +func (verifier *VerifierMockClient) CreateComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, comment string, command string) *MockClient_CreateComment_OngoingVerification { + params := []pegomock.Param{logger, repo, pullNum, comment, command} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CreateComment", params, verifier.timeout) return &MockClient_CreateComment_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -334,30 +335,34 @@ type MockClient_CreateComment_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockClient_CreateComment_OngoingVerification) GetCapturedArguments() (models.Repo, int, string, string) { - repo, pullNum, comment, command := c.GetAllCapturedArguments() - return repo[len(repo)-1], pullNum[len(pullNum)-1], comment[len(comment)-1], command[len(command)-1] +func (c *MockClient_CreateComment_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, int, string, string) { + logger, repo, pullNum, comment, command := c.GetAllCapturedArguments() + return logger[len(logger)-1], repo[len(repo)-1], pullNum[len(pullNum)-1], 
comment[len(comment)-1], command[len(command)-1] } -func (c *MockClient_CreateComment_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int, _param2 []string, _param3 []string) { +func (c *MockClient_CreateComment_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []int, _param3 []string, _param4 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]int, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(int) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]int, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(int) } _param3 = make([]string, len(c.methodInvocations)) for u, param := range params[3] { _param3[u] = param.(string) } + _param4 = make([]string, len(c.methodInvocations)) + for u, param := range params[4] { + _param4[u] = param.(string) + } } return } @@ -393,8 +398,8 @@ func (c *MockClient_DiscardReviews_OngoingVerification) GetAllCapturedArguments( return } -func (verifier *VerifierMockClient) GetCloneURL(VCSHostType models.VCSHostType, repo string) *MockClient_GetCloneURL_OngoingVerification { - params := []pegomock.Param{VCSHostType, repo} +func (verifier *VerifierMockClient) GetCloneURL(logger logging.SimpleLogging, VCSHostType models.VCSHostType, repo string) *MockClient_GetCloneURL_OngoingVerification { + params := []pegomock.Param{logger, VCSHostType, repo} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetCloneURL", params, verifier.timeout) return &MockClient_GetCloneURL_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -404,28 +409,32 @@ type MockClient_GetCloneURL_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockClient_GetCloneURL_OngoingVerification) GetCapturedArguments() (models.VCSHostType, string) { - VCSHostType, repo := c.GetAllCapturedArguments() - return VCSHostType[len(VCSHostType)-1], repo[len(repo)-1] +func (c *MockClient_GetCloneURL_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.VCSHostType, string) { + logger, VCSHostType, repo := c.GetAllCapturedArguments() + return logger[len(logger)-1], VCSHostType[len(VCSHostType)-1], repo[len(repo)-1] } -func (c *MockClient_GetCloneURL_OngoingVerification) GetAllCapturedArguments() (_param0 []models.VCSHostType, _param1 []string) { +func (c *MockClient_GetCloneURL_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.VCSHostType, _param2 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.VCSHostType, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.VCSHostType) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]string, len(c.methodInvocations)) + _param1 = 
make([]models.VCSHostType, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(string) + _param1[u] = param.(models.VCSHostType) + } + _param2 = make([]string, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(string) } } return } -func (verifier *VerifierMockClient) GetFileContent(pull models.PullRequest, fileName string) *MockClient_GetFileContent_OngoingVerification { - params := []pegomock.Param{pull, fileName} +func (verifier *VerifierMockClient) GetFileContent(logger logging.SimpleLogging, pull models.PullRequest, fileName string) *MockClient_GetFileContent_OngoingVerification { + params := []pegomock.Param{logger, pull, fileName} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetFileContent", params, verifier.timeout) return &MockClient_GetFileContent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -435,28 +444,32 @@ type MockClient_GetFileContent_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockClient_GetFileContent_OngoingVerification) GetCapturedArguments() (models.PullRequest, string) { - pull, fileName := c.GetAllCapturedArguments() - return pull[len(pull)-1], fileName[len(fileName)-1] +func (c *MockClient_GetFileContent_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.PullRequest, string) { + logger, pull, fileName := c.GetAllCapturedArguments() + return logger[len(logger)-1], pull[len(pull)-1], fileName[len(fileName)-1] } -func (c *MockClient_GetFileContent_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest, _param1 []string) { +func (c *MockClient_GetFileContent_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.PullRequest, _param2 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.PullRequest, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.PullRequest) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]string, len(c.methodInvocations)) + _param1 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(string) + _param1[u] = param.(models.PullRequest) + } + _param2 = make([]string, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(string) } } return } -func (verifier *VerifierMockClient) GetModifiedFiles(repo models.Repo, pull models.PullRequest) *MockClient_GetModifiedFiles_OngoingVerification { - params := []pegomock.Param{repo, pull} +func (verifier *VerifierMockClient) GetModifiedFiles(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) *MockClient_GetModifiedFiles_OngoingVerification { + params := []pegomock.Param{logger, repo, pull} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetModifiedFiles", params, verifier.timeout) return &MockClient_GetModifiedFiles_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -466,28 +479,32 @@ type MockClient_GetModifiedFiles_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockClient_GetModifiedFiles_OngoingVerification) 
GetCapturedArguments() (models.Repo, models.PullRequest) { - repo, pull := c.GetAllCapturedArguments() - return repo[len(repo)-1], pull[len(pull)-1] +func (c *MockClient_GetModifiedFiles_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest) { + logger, repo, pull := c.GetAllCapturedArguments() + return logger[len(logger)-1], repo[len(repo)-1], pull[len(pull)-1] } -func (c *MockClient_GetModifiedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { +func (c *MockClient_GetModifiedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) + } + _param2 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.PullRequest) } } return } -func (verifier *VerifierMockClient) GetPullLabels(repo models.Repo, pull models.PullRequest) *MockClient_GetPullLabels_OngoingVerification { - params := []pegomock.Param{repo, pull} +func (verifier *VerifierMockClient) GetPullLabels(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) *MockClient_GetPullLabels_OngoingVerification { + params := []pegomock.Param{logger, repo, pull} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetPullLabels", params, verifier.timeout) return &MockClient_GetPullLabels_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -497,21 +514,25 @@ type MockClient_GetPullLabels_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockClient_GetPullLabels_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - repo, pull := c.GetAllCapturedArguments() - return repo[len(repo)-1], pull[len(pull)-1] +func (c *MockClient_GetPullLabels_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest) { + logger, repo, pull := c.GetAllCapturedArguments() + return logger[len(logger)-1], repo[len(repo)-1], pull[len(pull)-1] } -func (c *MockClient_GetPullLabels_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { +func (c *MockClient_GetPullLabels_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := 
range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) + } + _param2 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.PullRequest) } } return @@ -548,8 +569,8 @@ func (c *MockClient_GetTeamNamesForUser_OngoingVerification) GetAllCapturedArgum return } -func (verifier *VerifierMockClient) HidePrevCommandComments(repo models.Repo, pullNum int, command string, dir string) *MockClient_HidePrevCommandComments_OngoingVerification { - params := []pegomock.Param{repo, pullNum, command, dir} +func (verifier *VerifierMockClient) HidePrevCommandComments(logger logging.SimpleLogging, repo models.Repo, pullNum int, command string, dir string) *MockClient_HidePrevCommandComments_OngoingVerification { + params := []pegomock.Param{logger, repo, pullNum, command, dir} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "HidePrevCommandComments", params, verifier.timeout) return &MockClient_HidePrevCommandComments_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -559,30 +580,34 @@ type MockClient_HidePrevCommandComments_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockClient_HidePrevCommandComments_OngoingVerification) GetCapturedArguments() (models.Repo, int, string, string) { - repo, pullNum, command, dir := c.GetAllCapturedArguments() - return repo[len(repo)-1], pullNum[len(pullNum)-1], command[len(command)-1], dir[len(dir)-1] +func (c *MockClient_HidePrevCommandComments_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, int, string, string) { + logger, repo, pullNum, command, dir := c.GetAllCapturedArguments() + return logger[len(logger)-1], repo[len(repo)-1], pullNum[len(pullNum)-1], command[len(command)-1], dir[len(dir)-1] } -func (c *MockClient_HidePrevCommandComments_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int, _param2 []string, _param3 []string) { +func (c *MockClient_HidePrevCommandComments_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []int, _param3 []string, _param4 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]int, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(int) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]int, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(int) } _param3 = make([]string, len(c.methodInvocations)) for u, param := range params[3] { _param3[u] = param.(string) } + _param4 = make([]string, len(c.methodInvocations)) + for u, param := range params[4] { + _param4[u] = param.(string) + } } return } @@ -614,8 +639,8 @@ func (c *MockClient_MarkdownPullLink_OngoingVerification) GetAllCapturedArgument return } -func (verifier *VerifierMockClient) MergePull(pull models.PullRequest, pullOptions models.PullRequestOptions) 
*MockClient_MergePull_OngoingVerification { - params := []pegomock.Param{pull, pullOptions} +func (verifier *VerifierMockClient) MergePull(logger logging.SimpleLogging, pull models.PullRequest, pullOptions models.PullRequestOptions) *MockClient_MergePull_OngoingVerification { + params := []pegomock.Param{logger, pull, pullOptions} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "MergePull", params, verifier.timeout) return &MockClient_MergePull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -625,28 +650,32 @@ type MockClient_MergePull_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockClient_MergePull_OngoingVerification) GetCapturedArguments() (models.PullRequest, models.PullRequestOptions) { - pull, pullOptions := c.GetAllCapturedArguments() - return pull[len(pull)-1], pullOptions[len(pullOptions)-1] +func (c *MockClient_MergePull_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.PullRequest, models.PullRequestOptions) { + logger, pull, pullOptions := c.GetAllCapturedArguments() + return logger[len(logger)-1], pull[len(pull)-1], pullOptions[len(pullOptions)-1] } -func (c *MockClient_MergePull_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest, _param1 []models.PullRequestOptions) { +func (c *MockClient_MergePull_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.PullRequest, _param2 []models.PullRequestOptions) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.PullRequest, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.PullRequest) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequestOptions, len(c.methodInvocations)) + _param1 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequestOptions) + _param1[u] = param.(models.PullRequest) + } + _param2 = make([]models.PullRequestOptions, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.PullRequestOptions) } } return } -func (verifier *VerifierMockClient) PullIsApproved(repo models.Repo, pull models.PullRequest) *MockClient_PullIsApproved_OngoingVerification { - params := []pegomock.Param{repo, pull} +func (verifier *VerifierMockClient) PullIsApproved(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) *MockClient_PullIsApproved_OngoingVerification { + params := []pegomock.Param{logger, repo, pull} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsApproved", params, verifier.timeout) return &MockClient_PullIsApproved_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -656,28 +685,32 @@ type MockClient_PullIsApproved_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockClient_PullIsApproved_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - repo, pull := c.GetAllCapturedArguments() - return repo[len(repo)-1], pull[len(pull)-1] +func (c *MockClient_PullIsApproved_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, 
models.PullRequest) { + logger, repo, pull := c.GetAllCapturedArguments() + return logger[len(logger)-1], repo[len(repo)-1], pull[len(pull)-1] } -func (c *MockClient_PullIsApproved_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { +func (c *MockClient_PullIsApproved_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) + } + _param2 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.PullRequest) } } return }
-func (verifier *VerifierMockClient) PullIsMergeable(repo models.Repo, pull models.PullRequest, vcsstatusname string) *MockClient_PullIsMergeable_OngoingVerification { - params := []pegomock.Param{repo, pull, vcsstatusname} +func (verifier *VerifierMockClient) PullIsMergeable(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, vcsstatusname string, ignoreVCSStatusNames []string) *MockClient_PullIsMergeable_OngoingVerification { + params := []pegomock.Param{logger, repo, pull, vcsstatusname, ignoreVCSStatusNames} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsMergeable", params, verifier.timeout) return &MockClient_PullIsMergeable_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -687,32 +720,40 @@ type MockClient_PullIsMergeable_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation }
-func (c *MockClient_PullIsMergeable_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) { - repo, pull, vcsstatusname := c.GetAllCapturedArguments() - return repo[len(repo)-1], pull[len(pull)-1], vcsstatusname[len(vcsstatusname)-1] +func (c *MockClient_PullIsMergeable_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string, []string) { + logger, repo, pull, vcsstatusname, ignoreVCSStatusNames := c.GetAllCapturedArguments() + return logger[len(logger)-1], repo[len(repo)-1], pull[len(pull)-1], vcsstatusname[len(vcsstatusname)-1], ignoreVCSStatusNames[len(ignoreVCSStatusNames)-1] }
-func (c *MockClient_PullIsMergeable_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) { +func (c *MockClient_PullIsMergeable_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string, _param4 [][]string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) + } + _param4 = make([][]string, len(c.methodInvocations)) + for u, param := range params[4] { + _param4[u] = param.([]string) } } return }
-func (verifier *VerifierMockClient) ReactToComment(repo models.Repo, pullNum int, commentID int64, reaction string) *MockClient_ReactToComment_OngoingVerification { - params := []pegomock.Param{repo, pullNum, commentID, reaction} +func (verifier *VerifierMockClient) ReactToComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, commentID int64, reaction string) *MockClient_ReactToComment_OngoingVerification { + params := []pegomock.Param{logger, repo, pullNum, commentID, reaction} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ReactToComment", params, verifier.timeout) return &MockClient_ReactToComment_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -722,29 +759,33 @@ type MockClient_ReactToComment_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation }
-func (c *MockClient_ReactToComment_OngoingVerification) GetCapturedArguments() (models.Repo, int, int64, string) { - repo, pullNum, commentID, reaction := c.GetAllCapturedArguments() - return repo[len(repo)-1], pullNum[len(pullNum)-1], commentID[len(commentID)-1], reaction[len(reaction)-1] +func (c *MockClient_ReactToComment_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, int, int64, string) { + logger, repo, pullNum, commentID, reaction := c.GetAllCapturedArguments() + return logger[len(logger)-1], repo[len(repo)-1], pullNum[len(pullNum)-1], commentID[len(commentID)-1], reaction[len(reaction)-1] }
-func (c *MockClient_ReactToComment_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int, _param2 []int64, _param3 []string) { +func (c *MockClient_ReactToComment_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []int, _param3 []int64, _param4 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]int, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(int) + _param1[u] = param.(models.Repo) } - _param2 = make([]int64, len(c.methodInvocations)) + _param2 = make([]int, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(int64) + _param2[u] = param.(int) } - _param3 = make([]string, len(c.methodInvocations)) + _param3 = make([]int64, len(c.methodInvocations)) for u, param := range params[3] { - _param3[u] = param.(string) + _param3[u] = param.(int64) + } + _param4 = make([]string, len(c.methodInvocations)) + for u, param := range params[4] { + _param4[u] = param.(string) } } return @@ -777,8 +818,8 @@ func (c *MockClient_SupportsSingleFileDownload_OngoingVerification) GetAllCaptur return } -func (verifier 
*VerifierMockClient) UpdateStatus(repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) *MockClient_UpdateStatus_OngoingVerification { - params := []pegomock.Param{repo, pull, state, src, description, url} +func (verifier *VerifierMockClient) UpdateStatus(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) *MockClient_UpdateStatus_OngoingVerification { + params := []pegomock.Param{logger, repo, pull, state, src, description, url} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UpdateStatus", params, verifier.timeout) return &MockClient_UpdateStatus_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -788,29 +829,29 @@ type MockClient_UpdateStatus_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockClient_UpdateStatus_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, models.CommitStatus, string, string, string) { - repo, pull, state, src, description, url := c.GetAllCapturedArguments() - return repo[len(repo)-1], pull[len(pull)-1], state[len(state)-1], src[len(src)-1], description[len(description)-1], url[len(url)-1] +func (c *MockClient_UpdateStatus_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, models.CommitStatus, string, string, string) { + logger, repo, pull, state, src, description, url := c.GetAllCapturedArguments() + return logger[len(logger)-1], repo[len(repo)-1], pull[len(pull)-1], state[len(state)-1], src[len(src)-1], description[len(description)-1], url[len(url)-1] } -func (c *MockClient_UpdateStatus_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []models.CommitStatus, _param3 []string, _param4 []string, _param5 []string) { +func (c *MockClient_UpdateStatus_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []models.CommitStatus, _param4 []string, _param5 []string, _param6 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]models.CommitStatus, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(models.CommitStatus) + _param2[u] = param.(models.PullRequest) } - _param3 = make([]string, len(c.methodInvocations)) + _param3 = make([]models.CommitStatus, len(c.methodInvocations)) for u, param := range params[3] { - _param3[u] = param.(string) + _param3[u] = param.(models.CommitStatus) } _param4 = make([]string, len(c.methodInvocations)) for u, param := range params[4] { @@ -820,6 +861,10 @@ func (c *MockClient_UpdateStatus_OngoingVerification) GetAllCapturedArguments() for u, param := range params[5] { 
_param5[u] = param.(string) } + _param6 = make([]string, len(c.methodInvocations)) + for u, param := range params[6] { + _param6[u] = param.(string) + } } return } diff --git a/server/events/vcs/mocks/mock_github_pull_request_getter.go b/server/events/vcs/mocks/mock_github_pull_request_getter.go index 3d12a38a0a..995281b4c5 100644 --- a/server/events/vcs/mocks/mock_github_pull_request_getter.go +++ b/server/events/vcs/mocks/mock_github_pull_request_getter.go @@ -4,9 +4,10 @@ package mocks import ( - github "github.com/google/go-github/v58/github" + github "github.com/google/go-github/v65/github" pegomock "github.com/petergtz/pegomock/v4" models "github.com/runatlantis/atlantis/server/events/models" + logging "github.com/runatlantis/atlantis/server/logging" "reflect" "time" ) @@ -26,11 +27,11 @@ func NewMockGithubPullRequestGetter(options ...pegomock.Option) *MockGithubPullR func (mock *MockGithubPullRequestGetter) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockGithubPullRequestGetter) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockGithubPullRequestGetter) GetPullRequest(repo models.Repo, pullNum int) (*github.PullRequest, error) { +func (mock *MockGithubPullRequestGetter) GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) (*github.PullRequest, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockGithubPullRequestGetter().") } - params := []pegomock.Param{repo, pullNum} + params := []pegomock.Param{logger, repo, pullNum} result := pegomock.GetGenericMockFrom(mock).Invoke("GetPullRequest", params, []reflect.Type{reflect.TypeOf((**github.PullRequest)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 *github.PullRequest var ret1 error @@ -82,8 +83,8 @@ type VerifierMockGithubPullRequestGetter struct { timeout time.Duration } -func (verifier *VerifierMockGithubPullRequestGetter) GetPullRequest(repo models.Repo, pullNum int) *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification { - params := []pegomock.Param{repo, pullNum} +func (verifier *VerifierMockGithubPullRequestGetter) GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification { + params := []pegomock.Param{logger, repo, pullNum} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetPullRequest", params, verifier.timeout) return &MockGithubPullRequestGetter_GetPullRequest_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -93,21 +94,25 @@ type MockGithubPullRequestGetter_GetPullRequest_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification) GetCapturedArguments() (models.Repo, int) { - repo, pullNum := c.GetAllCapturedArguments() - return repo[len(repo)-1], pullNum[len(pullNum)-1] +func (c *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, int) { + logger, repo, pullNum := c.GetAllCapturedArguments() + return logger[len(logger)-1], repo[len(repo)-1], pullNum[len(pullNum)-1] } -func (c *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int) { +func (c *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 
[]models.Repo, _param2 []int) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]int, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(int) + _param1[u] = param.(models.Repo) + } + _param2 = make([]int, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(int) } } return diff --git a/server/events/vcs/mocks/mock_pull_req_status_fetcher.go b/server/events/vcs/mocks/mock_pull_req_status_fetcher.go index 3e13d29990..d9e6494d9a 100644 --- a/server/events/vcs/mocks/mock_pull_req_status_fetcher.go +++ b/server/events/vcs/mocks/mock_pull_req_status_fetcher.go @@ -6,6 +6,7 @@ package mocks import ( pegomock "github.com/petergtz/pegomock/v4" models "github.com/runatlantis/atlantis/server/events/models" + logging "github.com/runatlantis/atlantis/server/logging" "reflect" "time" ) @@ -25,11 +26,11 @@ func NewMockPullReqStatusFetcher(options ...pegomock.Option) *MockPullReqStatusF func (mock *MockPullReqStatusFetcher) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockPullReqStatusFetcher) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockPullReqStatusFetcher) FetchPullStatus(pull models.PullRequest) (models.PullReqStatus, error) { +func (mock *MockPullReqStatusFetcher) FetchPullStatus(logger logging.SimpleLogging, pull models.PullRequest) (models.PullReqStatus, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockPullReqStatusFetcher().") } - params := []pegomock.Param{pull} + params := []pegomock.Param{logger, pull} result := pegomock.GetGenericMockFrom(mock).Invoke("FetchPullStatus", params, []reflect.Type{reflect.TypeOf((*models.PullReqStatus)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 models.PullReqStatus var ret1 error @@ -81,8 +82,8 @@ type VerifierMockPullReqStatusFetcher struct { timeout time.Duration } -func (verifier *VerifierMockPullReqStatusFetcher) FetchPullStatus(pull models.PullRequest) *MockPullReqStatusFetcher_FetchPullStatus_OngoingVerification { - params := []pegomock.Param{pull} +func (verifier *VerifierMockPullReqStatusFetcher) FetchPullStatus(logger logging.SimpleLogging, pull models.PullRequest) *MockPullReqStatusFetcher_FetchPullStatus_OngoingVerification { + params := []pegomock.Param{logger, pull} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "FetchPullStatus", params, verifier.timeout) return &MockPullReqStatusFetcher_FetchPullStatus_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -92,17 +93,21 @@ type MockPullReqStatusFetcher_FetchPullStatus_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockPullReqStatusFetcher_FetchPullStatus_OngoingVerification) GetCapturedArguments() models.PullRequest { - pull := c.GetAllCapturedArguments() - return pull[len(pull)-1] +func (c *MockPullReqStatusFetcher_FetchPullStatus_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.PullRequest) { + logger, pull := c.GetAllCapturedArguments() + return logger[len(logger)-1], pull[len(pull)-1] } -func (c *MockPullReqStatusFetcher_FetchPullStatus_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest) { +func (c *MockPullReqStatusFetcher_FetchPullStatus_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.PullRequest) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.PullRequest, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.PullRequest) + _param0[u] = param.(logging.SimpleLogging) + } + _param1 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(models.PullRequest) } } return diff --git a/server/events/vcs/not_configured_vcs_client.go b/server/events/vcs/not_configured_vcs_client.go index b7eed9e900..41b14ad2c6 100644 --- a/server/events/vcs/not_configured_vcs_client.go +++ b/server/events/vcs/not_configured_vcs_client.go @@ -17,6 +17,7 @@ import ( "fmt" "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" ) // NotConfiguredVCSClient is used as a placeholder when Atlantis isn't configured @@ -26,31 +27,31 @@ type NotConfiguredVCSClient struct { Host models.VCSHostType } -func (a *NotConfiguredVCSClient) GetModifiedFiles(_ models.Repo, _ models.PullRequest) ([]string, error) { +func (a *NotConfiguredVCSClient) GetModifiedFiles(_ logging.SimpleLogging, _ models.Repo, _ models.PullRequest) ([]string, error) { return nil, a.err() } -func (a *NotConfiguredVCSClient) CreateComment(_ models.Repo, _ int, _ string, _ string) error { +func (a *NotConfiguredVCSClient) CreateComment(_ logging.SimpleLogging, _ models.Repo, 
_ int, _ string, _ string) error { return a.err() } -func (a *NotConfiguredVCSClient) HidePrevCommandComments(_ models.Repo, _ int, _ string, _ string) error { +func (a *NotConfiguredVCSClient) HidePrevCommandComments(_ logging.SimpleLogging, _ models.Repo, _ int, _ string, _ string) error { return nil } -func (a *NotConfiguredVCSClient) ReactToComment(repo models.Repo, pullNum int, commentID int64, reaction string) error { // nolint: revive +func (a *NotConfiguredVCSClient) ReactToComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, commentID int64, reaction string) error { // nolint: revive return nil } -func (a *NotConfiguredVCSClient) PullIsApproved(_ models.Repo, _ models.PullRequest) (models.ApprovalStatus, error) { +func (a *NotConfiguredVCSClient) PullIsApproved(_ logging.SimpleLogging, _ models.Repo, _ models.PullRequest) (models.ApprovalStatus, error) { return models.ApprovalStatus{}, a.err() } func (a *NotConfiguredVCSClient) DiscardReviews(_ models.Repo, _ models.PullRequest) error { return nil } -func (a *NotConfiguredVCSClient) PullIsMergeable(_ models.Repo, _ models.PullRequest, _ string) (bool, error) { +func (a *NotConfiguredVCSClient) PullIsMergeable(_ logging.SimpleLogging, _ models.Repo, _ models.PullRequest, _ string, _ []string) (bool, error) { return false, a.err() } -func (a *NotConfiguredVCSClient) UpdateStatus(_ models.Repo, _ models.PullRequest, _ models.CommitStatus, _ string, _ string, _ string) error { +func (a *NotConfiguredVCSClient) UpdateStatus(_ logging.SimpleLogging, _ models.Repo, _ models.PullRequest, _ models.CommitStatus, _ string, _ string, _ string) error { return a.err() } -func (a *NotConfiguredVCSClient) MergePull(_ models.PullRequest, _ models.PullRequestOptions) error { +func (a *NotConfiguredVCSClient) MergePull(_ logging.SimpleLogging, _ models.PullRequest, _ models.PullRequestOptions) error { return a.err() } func (a *NotConfiguredVCSClient) MarkdownPullLink(_ models.PullRequest) (string, error) { @@ -67,13 +68,13 @@ func (a *NotConfiguredVCSClient) SupportsSingleFileDownload(_ models.Repo) bool return false } -func (a *NotConfiguredVCSClient) GetFileContent(_ models.PullRequest, _ string) (bool, []byte, error) { +func (a *NotConfiguredVCSClient) GetFileContent(_ logging.SimpleLogging, _ models.PullRequest, _ string) (bool, []byte, error) { return true, []byte{}, a.err() } -func (a *NotConfiguredVCSClient) GetCloneURL(_ models.VCSHostType, _ string) (string, error) { +func (a *NotConfiguredVCSClient) GetCloneURL(_ logging.SimpleLogging, _ models.VCSHostType, _ string) (string, error) { return "", a.err() } -func (a *NotConfiguredVCSClient) GetPullLabels(_ models.Repo, _ models.PullRequest) ([]string, error) { +func (a *NotConfiguredVCSClient) GetPullLabels(_ logging.SimpleLogging, _ models.Repo, _ models.PullRequest) ([]string, error) { return nil, a.err() } diff --git a/server/events/vcs/proxy.go b/server/events/vcs/proxy.go index 768b0b6255..68aa45bf58 100644 --- a/server/events/vcs/proxy.go +++ b/server/events/vcs/proxy.go @@ -15,6 +15,7 @@ package vcs import ( "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" ) // ClientProxy proxies calls to the correct VCS client depending on which @@ -25,7 +26,7 @@ type ClientProxy struct { clients map[models.VCSHostType]Client } -func NewClientProxy(githubClient Client, gitlabClient Client, bitbucketCloudClient Client, bitbucketServerClient Client, azuredevopsClient Client) *ClientProxy { +func NewClientProxy(githubClient 
diff --git a/server/events/vcs/proxy.go b/server/events/vcs/proxy.go index 768b0b6255..68aa45bf58 100644 --- a/server/events/vcs/proxy.go +++ b/server/events/vcs/proxy.go @@ -15,6 +15,7 @@ package vcs import ( "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" ) // ClientProxy proxies calls to the correct VCS client depending on which @@ -25,7 +26,7 @@ type ClientProxy struct { clients map[models.VCSHostType]Client } -func NewClientProxy(githubClient Client, gitlabClient Client, bitbucketCloudClient Client, bitbucketServerClient Client, azuredevopsClient Client) *ClientProxy { +func NewClientProxy(githubClient Client, gitlabClient Client, bitbucketCloudClient Client, bitbucketServerClient Client, azuredevopsClient Client, giteaClient Client) *ClientProxy { if githubClient == nil { githubClient = &NotConfiguredVCSClient{} } @@ -41,6 +42,9 @@ func NewClientProxy(githubClient Client, gitlabClient Client, bitbucketCloudClie if azuredevopsClient == nil { azuredevopsClient = &NotConfiguredVCSClient{} } + if giteaClient == nil { + giteaClient = &NotConfiguredVCSClient{} + } return &ClientProxy{ clients: map[models.VCSHostType]Client{ models.Github: githubClient, @@ -48,44 +52,45 @@ func NewClientProxy(githubClient Client, gitlabClient Client, bitbucketCloudClie models.BitbucketCloud: bitbucketCloudClient, models.BitbucketServer: bitbucketServerClient, models.AzureDevops: azuredevopsClient, + models.Gitea: giteaClient, }, } } -func (d *ClientProxy) GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) { - return d.clients[repo.VCSHost.Type].GetModifiedFiles(repo, pull) +func (d *ClientProxy) GetModifiedFiles(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) { + return d.clients[repo.VCSHost.Type].GetModifiedFiles(logger, repo, pull) } -func (d *ClientProxy) CreateComment(repo models.Repo, pullNum int, comment string, command string) error { - return d.clients[repo.VCSHost.Type].CreateComment(repo, pullNum, comment, command) +func (d *ClientProxy) CreateComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, comment string, command string) error { + return d.clients[repo.VCSHost.Type].CreateComment(logger, repo, pullNum, comment, command) } -func (d *ClientProxy) HidePrevCommandComments(repo models.Repo, pullNum int, command string, dir string) error { - return d.clients[repo.VCSHost.Type].HidePrevCommandComments(repo, pullNum, command, dir) +func (d *ClientProxy) HidePrevCommandComments(logger logging.SimpleLogging, repo models.Repo, pullNum int, command string, dir string) error { + return d.clients[repo.VCSHost.Type].HidePrevCommandComments(logger, repo, pullNum, command, dir) } -func (d *ClientProxy) ReactToComment(repo models.Repo, pullNum int, commentID int64, reaction string) error { - return d.clients[repo.VCSHost.Type].ReactToComment(repo, pullNum, commentID, reaction) +func (d *ClientProxy) ReactToComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, commentID int64, reaction string) error { + return d.clients[repo.VCSHost.Type].ReactToComment(logger, repo, pullNum, commentID, reaction) } -func (d *ClientProxy) PullIsApproved(repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { - return d.clients[repo.VCSHost.Type].PullIsApproved(repo, pull) +func (d *ClientProxy) PullIsApproved(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { + return d.clients[repo.VCSHost.Type].PullIsApproved(logger, repo, pull) } func (d *ClientProxy) DiscardReviews(repo models.Repo, pull models.PullRequest) error { return d.clients[repo.VCSHost.Type].DiscardReviews(repo, pull) } -func (d *ClientProxy) PullIsMergeable(repo models.Repo, pull models.PullRequest, vcsstatusname string) (bool, error) { - return d.clients[repo.VCSHost.Type].PullIsMergeable(repo, pull, vcsstatusname) +func (d *ClientProxy) PullIsMergeable(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, vcsstatusname string, ignoreVCSStatusNames []string) (bool, error) { + return d.clients[repo.VCSHost.Type].PullIsMergeable(logger, repo, pull, vcsstatusname, 
ignoreVCSStatusNames) } -func (d *ClientProxy) UpdateStatus(repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error { - return d.clients[repo.VCSHost.Type].UpdateStatus(repo, pull, state, src, description, url) +func (d *ClientProxy) UpdateStatus(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error { + return d.clients[repo.VCSHost.Type].UpdateStatus(logger, repo, pull, state, src, description, url) } -func (d *ClientProxy) MergePull(pull models.PullRequest, pullOptions models.PullRequestOptions) error { - return d.clients[pull.BaseRepo.VCSHost.Type].MergePull(pull, pullOptions) +func (d *ClientProxy) MergePull(logger logging.SimpleLogging, pull models.PullRequest, pullOptions models.PullRequestOptions) error { + return d.clients[pull.BaseRepo.VCSHost.Type].MergePull(logger, pull, pullOptions) } func (d *ClientProxy) MarkdownPullLink(pull models.PullRequest) (string, error) { @@ -96,18 +101,18 @@ func (d *ClientProxy) GetTeamNamesForUser(repo models.Repo, user models.User) ([ return d.clients[repo.VCSHost.Type].GetTeamNamesForUser(repo, user) } -func (d *ClientProxy) GetFileContent(pull models.PullRequest, fileName string) (bool, []byte, error) { - return d.clients[pull.BaseRepo.VCSHost.Type].GetFileContent(pull, fileName) +func (d *ClientProxy) GetFileContent(logger logging.SimpleLogging, pull models.PullRequest, fileName string) (bool, []byte, error) { + return d.clients[pull.BaseRepo.VCSHost.Type].GetFileContent(logger, pull, fileName) } func (d *ClientProxy) SupportsSingleFileDownload(repo models.Repo) bool { return d.clients[repo.VCSHost.Type].SupportsSingleFileDownload(repo) } -func (d *ClientProxy) GetCloneURL(VCSHostType models.VCSHostType, repo string) (string, error) { - return d.clients[VCSHostType].GetCloneURL(VCSHostType, repo) +func (d *ClientProxy) GetCloneURL(logger logging.SimpleLogging, VCSHostType models.VCSHostType, repo string) (string, error) { - return d.clients[VCSHostType].GetCloneURL(VCSHostType, repo) + return d.clients[VCSHostType].GetCloneURL(logger, VCSHostType, repo) } -func (d *ClientProxy) GetPullLabels(repo models.Repo, pull models.PullRequest) ([]string, error) { - return d.clients[repo.VCSHost.Type].GetPullLabels(repo, pull) +func (d *ClientProxy) GetPullLabels(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) { + return d.clients[repo.VCSHost.Type].GetPullLabels(logger, repo, pull) }
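Taken together, the constructor and the per-method lookups form a simple dispatch table: every call is routed via d.clients[repo.VCSHost.Type], and the nil checks guarantee each host type, including the new Gitea entry, resolves to at least the NotConfiguredVCSClient. A stripped-down sketch of that shape, with hypothetical names standing in for the Atlantis types:

package main

import "fmt"

// HostType stands in for models.VCSHostType.
type HostType int

const (
	Github HostType = iota
	Gitea
)

// Client stands in for the vcs.Client interface.
type Client interface{ Ping() error }

type notConfigured struct{}

func (notConfigured) Ping() error { return fmt.Errorf("host not configured") }

type okClient struct{}

func (okClient) Ping() error { return nil }

// newProxy mirrors NewClientProxy: nil clients fall back to the null object,
// so lookups in the dispatch map never return a nil Client.
func newProxy(github, gitea Client) map[HostType]Client {
	if github == nil {
		github = notConfigured{}
	}
	if gitea == nil {
		gitea = notConfigured{}
	}
	return map[HostType]Client{Github: github, Gitea: gitea}
}

func main() {
	clients := newProxy(okClient{}, nil) // Gitea left unconfigured
	fmt.Println(clients[Github].Ping())  // <nil>
	fmt.Println(clients[Gitea].Ping())   // host not configured
}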
diff --git a/server/events/vcs/pull_status_fetcher.go b/server/events/vcs/pull_status_fetcher.go index 43aef1183c..b96d69011a 100644 --- a/server/events/vcs/pull_status_fetcher.go +++ b/server/events/vcs/pull_status_fetcher.go @@ -3,33 +3,36 @@ package vcs import ( "github.com/pkg/errors" "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" ) -//go:generate pegomock generate --package mocks -o mocks/mock_pull_req_status_fetcher.go PullReqStatusFetcher +//go:generate pegomock generate github.com/runatlantis/atlantis/server/events/vcs --package mocks -o mocks/mock_pull_req_status_fetcher.go PullReqStatusFetcher type PullReqStatusFetcher interface { - FetchPullStatus(pull models.PullRequest) (models.PullReqStatus, error) + FetchPullStatus(logger logging.SimpleLogging, pull models.PullRequest) (models.PullReqStatus, error) } type pullReqStatusFetcher struct { - client Client - vcsStatusName string + client Client + vcsStatusName string + ignoreVCSStatusNames []string } -func NewPullReqStatusFetcher(client Client, vcsStatusName string) PullReqStatusFetcher { +func NewPullReqStatusFetcher(client Client, vcsStatusName string, ignoreVCSStatusNames []string) PullReqStatusFetcher { return &pullReqStatusFetcher{ - client: client, - vcsStatusName: vcsStatusName, + client: client, + vcsStatusName: vcsStatusName, + ignoreVCSStatusNames: ignoreVCSStatusNames, } } -func (f *pullReqStatusFetcher) FetchPullStatus(pull models.PullRequest) (pullStatus models.PullReqStatus, err error) { - approvalStatus, err := f.client.PullIsApproved(pull.BaseRepo, pull) +func (f *pullReqStatusFetcher) FetchPullStatus(logger logging.SimpleLogging, pull models.PullRequest) (pullStatus models.PullReqStatus, err error) { + approvalStatus, err := f.client.PullIsApproved(logger, pull.BaseRepo, pull) if err != nil { return pullStatus, errors.Wrapf(err, "fetching pull approval status for repo: %s, and pull number: %d", pull.BaseRepo.FullName, pull.Num) } - mergeable, err := f.client.PullIsMergeable(pull.BaseRepo, pull, f.vcsStatusName) + mergeable, err := f.client.PullIsMergeable(logger, pull.BaseRepo, pull, f.vcsStatusName, f.ignoreVCSStatusNames) if err != nil { return pullStatus, errors.Wrapf(err, "fetching mergeability status for repo: %s, and pull number: %d", pull.BaseRepo.FullName, pull.Num) } diff --git a/server/events/vcs/testdata/fixtures.go index ba77ec8318..d637c3b5dd 100644 --- a/server/events/vcs/testdata/fixtures.go +++ b/server/events/vcs/testdata/fixtures.go @@ -22,7 +22,7 @@ import ( "testing" "github.com/golang-jwt/jwt/v5" - "github.com/google/go-github/v58/github" + "github.com/google/go-github/v65/github" "github.com/mcdafydd/go-azuredevops/azuredevops" ) @@ -496,6 +496,79 @@ var githubAppInstallationJSON = `[ } ]` +var githubAppMultipleInstallationJSON = `[ + { + "id": 1, + "account": { + "login": "github", + "id": 1, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjE=", + "url": "https://api.github.com/orgs/github", + "repos_url": "https://api.github.com/orgs/github/repos", + "events_url": "https://api.github.com/orgs/github/events", + "hooks_url": "https://api.github.com/orgs/github/hooks", + "issues_url": "https://api.github.com/orgs/github/issues", + "members_url": "https://api.github.com/orgs/github/members{/member}", + "public_members_url": "https://api.github.com/orgs/github/public_members{/member}", + "avatar_url": "https://github.com/images/error/octocat_happy.gif", + "description": "A great organization" + }, + "access_tokens_url": "https://api.github.com/installations/1/access_tokens", + "repositories_url": "https://api.github.com/installation/repositories", + "html_url": "https://github.com/organizations/github/settings/installations/1", + "app_id": 1, + "target_id": 1, + "target_type": "Organization", + "permissions": { + "metadata": "read", + "contents": "read", + "issues": "write", + "single_file": "write" + }, + "events": [ + "push", + "pull_request" + ], + "single_file_name": "config.yml", + "repository_selection": "selected" + }, + { + "id": 2, + "account": { + "login": "github", + "id": 1, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjE=", + "url": "https://api.github.com/orgs/github", + "repos_url": "https://api.github.com/orgs/github/repos", + "events_url": "https://api.github.com/orgs/github/events", + "hooks_url": "https://api.github.com/orgs/github/hooks", + "issues_url": "https://api.github.com/orgs/github/issues", + "members_url": "https://api.github.com/orgs/github/members{/member}", + "public_members_url": 
"https://api.github.com/orgs/github/public_members{/member}", + "avatar_url": "https://github.com/images/error/octocat_happy.gif", + "description": "A great organization" + }, + "access_tokens_url": "https://api.github.com/installations/1/access_tokens", + "repositories_url": "https://api.github.com/installation/repositories", + "html_url": "https://github.com/organizations/github/settings/installations/1", + "app_id": 1, + "target_id": 1, + "target_type": "Organization", + "permissions": { + "metadata": "read", + "contents": "read", + "issues": "write", + "single_file": "write" + }, + "events": [ + "push", + "pull_request" + ], + "single_file_name": "config.yml", + "repository_selection": "selected" + } +]` + // nolint: gosec var githubAppTokenJSON = `{ "token": "some-token", @@ -741,3 +814,58 @@ func GithubAppTestServer(t *testing.T) (string, error) { return testServerURL.Host, err } + +func GithubMultipleAppTestServer(t *testing.T) (string, error) { + counter := 0 + testServer := httptest.NewTLSServer( + http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.RequestURI { + case "/api/v3/app-manifests/good-code/conversions": + encodedKey := strings.Join(strings.Split(GithubPrivateKey, "\n"), "\\n") + appInfo := fmt.Sprintf(githubConversionJSON, encodedKey) + w.Write([]byte(appInfo)) // nolint: errcheck + // https://developer.github.com/v3/apps/#list-installations + case "/api/v3/app/installations": + token := strings.Replace(r.Header.Get("Authorization"), "Bearer ", "", 1) + if err := validateGithubToken(token); err != nil { + w.WriteHeader(403) + w.Write([]byte("Invalid token")) // nolint: errcheck + return + } + + w.Write([]byte(githubAppMultipleInstallationJSON)) // nolint: errcheck + return + case "/api/v3/apps/some-app": + token := strings.Replace(r.Header.Get("Authorization"), "token ", "", 1) + + // token is taken from githubAppTokenJSON + if token != "some-token" { + w.WriteHeader(403) + w.Write([]byte("Invalid installation token")) // nolint: errcheck + return + } + w.Write([]byte(githubAppJSON)) // nolint: errcheck + return + case "/api/v3/app/installations/1/access_tokens": + token := strings.Replace(r.Header.Get("Authorization"), "Bearer ", "", 1) + if err := validateGithubToken(token); err != nil { + w.WriteHeader(403) + w.Write([]byte("Invalid token")) // nolint: errcheck + return + } + + appToken := fmt.Sprintf(githubAppTokenJSON, counter) + counter++ + w.Write([]byte(appToken)) // nolint: errcheck + return + default: + t.Errorf("got unexpected request at %q", r.RequestURI) + http.Error(w, "not found", http.StatusNotFound) + return + } + })) + + testServerURL, err := url.Parse(testServer.URL) + + return testServerURL.Host, err +} diff --git a/server/events/vcs/testdata/github-branch-protection-no-required-checks.json b/server/events/vcs/testdata/github-branch-protection-no-required-checks.json deleted file mode 100644 index 4dd1496b79..0000000000 --- a/server/events/vcs/testdata/github-branch-protection-no-required-checks.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "url": "https://api.github.com/repos/octocat/repo/branches/master/protection", - "required_pull_request_reviews": { - "url": "https://api.github.com/repos/octocat/repo/branches/master/protection/required_pull_request_reviews", - "dismiss_stale_reviews": false, - "require_code_owner_reviews": false, - "require_last_push_approval": false, - "required_approving_review_count": 1 - } -} diff --git a/server/events/vcs/testdata/github-branch-protection-required-checks.json 
b/server/events/vcs/testdata/github-branch-protection-required-checks.json deleted file mode 100644 index 9f422db9ea..0000000000 --- a/server/events/vcs/testdata/github-branch-protection-required-checks.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "url": "https://api.github.com/repos/octocat/Hello-World/branches/master/protection", - "required_status_checks": { - "url": "https://api.github.com/repos/octocat/Hello-World/branches/master/protection/required_status_checks", - "strict": true, - "contexts": [ - "atlantis/apply" - ], - "contexts_url": "https://api.github.com/repos/octocat/Hello-World/branches/master/protection/required_status_checks/contexts", - "checks": [ - { - "context": "atlantis/apply", - "app_id": 123456 - } - ] - } -} diff --git a/server/events/vcs/testdata/github-commit-check-suites-check-runs-completed.json b/server/events/vcs/testdata/github-commit-check-suites-check-runs-completed.json deleted file mode 100644 index 125e4d1ddf..0000000000 --- a/server/events/vcs/testdata/github-commit-check-suites-check-runs-completed.json +++ /dev/null @@ -1,95 +0,0 @@ -{ - "total_count": 1, - "check_runs": [ - { - "id": 4, - "head_sha": "ce587453ced02b1526dfb4cb910479d431683101", - "node_id": "MDg6Q2hlY2tSdW40", - "external_id": "", - "url": "https://api.github.com/repos/github/hello-world/check-runs/4", - "html_url": "https://github.com/github/hello-world/runs/4", - "details_url": "https://example.com", - "status": "completed", - "conclusion": "success", - "started_at": "2018-05-04T01:14:52Z", - "completed_at": "2018-05-04T01:14:52Z", - "output": { - "title": "Mighty Readme report", - "summary": "There are 0 failures, 2 warnings, and 1 notice.", - "text": "You may have some misspelled words on lines 2 and 4. You also may want to add a section in your README about how to install your app.", - "annotations_count": 2, - "annotations_url": "https://api.github.com/repos/github/hello-world/check-runs/4/annotations" - }, - "name": "mighty_readme", - "check_suite": { - "id": 5 - }, - "app": { - "id": 1, - "slug": "octoapp", - "node_id": "MDExOkludGVncmF0aW9uMQ==", - "owner": { - "login": "github", - "id": 1, - "node_id": "MDEyOk9yZ2FuaXphdGlvbjE=", - "url": "https://api.github.com/orgs/github", - "repos_url": "https://api.github.com/orgs/github/repos", - "events_url": "https://api.github.com/orgs/github/events", - "avatar_url": "https://github.com/images/error/octocat_happy.gif", - "gravatar_id": "", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "User", - "site_admin": true - }, - "name": "Octocat App", - "description": "", - "external_url": "https://example.com", - "html_url": "https://github.com/apps/octoapp", - "created_at": "2017-07-08T16:18:44-04:00", - "updated_at": "2017-07-08T16:18:44-04:00", - "permissions": { - "metadata": "read", - "contents": "read", - "issues": "write", - "single_file": "write" - }, - "events": [ - "push", - "pull_request" - ] - }, - "pull_requests": [ - { - "url": "https://api.github.com/repos/github/hello-world/pulls/1", - "id": 1934, - 
"number": 3956, - "head": { - "ref": "say-hello", - "sha": "3dca65fa3e8d4b3da3f3d056c59aee1c50f41390", - "repo": { - "id": 526, - "url": "https://api.github.com/repos/github/hello-world", - "name": "hello-world" - } - }, - "base": { - "ref": "master", - "sha": "e7fdf7640066d71ad16a86fbcbb9c6a10a18af4f", - "repo": { - "id": 526, - "url": "https://api.github.com/repos/github/hello-world", - "name": "hello-world" - } - } - } - ] - } - ] -} diff --git a/server/events/vcs/testdata/github-commit-check-suites-completed.json b/server/events/vcs/testdata/github-commit-check-suites-completed.json deleted file mode 100644 index b8af9c32a9..0000000000 --- a/server/events/vcs/testdata/github-commit-check-suites-completed.json +++ /dev/null @@ -1,169 +0,0 @@ -{ - "total_count": 1, - "check_suites": [ - { - "id": 1234567890, - "node_id": "CS_kwDOHE7PYM8AAAAB2iIZfQ", - "head_branch": "atlantis-patch-2", - "head_sha": "4273e07c528292222f119a040079093bf1f11232", - "status": "completed", - "conclusion": null, - "url": "https://api.github.com/repos/octocat/Hello-World/check-suites/1234567890", - "before": "0000000000000000000000000000000000000000", - "after": "4273e07c528292222f119a040079093bf1f11232", - "pull_requests": [ - { - "url": "https://api.github.com/repos/octocat/Hello-World/pulls/1", - "id": 1035065545, - "number": 1, - "head": { - "ref": "atlantis-patch-2", - "sha": "4273e07c528292222f119a040079093bf1f11232", - "repo": { - "id": 474926944, - "url": "https://api.github.com/repos/octocat/Hello-World", - "name": "Hello-World" - } - }, - "base": { - "ref": "main", - "sha": "6f5744874b33ceb6a5c91edc91085991dbd1f61a", - "repo": { - "id": 474926944, - "url": "https://api.github.com/repos/octocat/Hello-World", - "name": "Hello-World" - } - } - } - ], - "app": { - "id": 184783, - "slug": "atlantis", - "node_id": "A_kwHOBbMkBs4AAtHP", - "owner": { - "login": "octocat", - "id": 95626246, - "node_id": "O_kgDOBbMkBg", - "avatar_url": "https://avatars.githubusercontent.com/u/95626246?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": "https://api.github.com/users/octocat/repos", - "events_url": "https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "Organization", - "site_admin": false - }, - "name": "atlantis", - "description": "", - "external_url": "https://atlantis.localhost/", - "html_url": "https://github.com/apps/atlantis", - "created_at": "2022-03-29T08:51:26Z", - "updated_at": "2022-07-01T11:35:37Z", - "permissions": { - "administration": "read", - "checks": "write", - "contents": "write", - "issues": "write", - "metadata": "read", - "pull_requests": "write", - "statuses": "write" - }, - "events": [] - }, - "created_at": "2022-08-24T07:06:21Z", - "updated_at": "2022-08-24T07:06:21Z", - "rerequestable": true, - "runs_rerequestable": true, - "latest_check_runs_count": 0, - "check_runs_url": "https://api.github.com/repos/octocat/Hello-World/check-suites/1234567890/check-runs", - "head_commit": 
{ - "id": "4273e07c528292222f119a040079093bf1f11232", - "tree_id": "56781332464aabdfae51b7f37f72ffc6ce8ce54e", - "message": "test atlantis", - "timestamp": "2022-08-24T07:06:21Z", - "author": { - "name": "octocat", - "email": "octocat@noreply.github.com" - }, - "committer": { - "name": "GitHub", - "email": "noreply@github.com" - } - }, - "repository": { - "id": 474926944, - "node_id": "R_kgDOHE7PYA", - "name": "Hello-World", - "full_name": "octocat/Hello-World", - "private": true, - "owner": { - "login": "octocat", - "id": 95626246, - "node_id": "O_kgDOBbMkBg", - "avatar_url": "https://avatars.githubusercontent.com/u/95626246?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": "https://api.github.com/users/octocat/repos", - "events_url": "https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "Organization", - "site_admin": false - }, - "html_url": "https://github.com/octocat/Hello-World", - "description": null, - "fork": false, - "url": "https://api.github.com/repos/octocat/Hello-World", - "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks", - "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}", - "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}", - "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams", - "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks", - "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}", - "events_url": "https://api.github.com/repos/octocat/Hello-World/events", - "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}", - "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}", - "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags", - "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}", - "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}", - "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}", - "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}", - "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}", - "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages", - "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers", - "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors", - "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers", - "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription", - "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}", - "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}", - "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}", 
- "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}", - "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}", - "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}", - "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges", - "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}", - "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads", - "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}", - "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}", - "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}", - "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}", - "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}", - "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}", - "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments" - } - } - ] -} diff --git a/server/events/vcs/testdata/github-commit-check-suites.json b/server/events/vcs/testdata/github-commit-check-suites.json deleted file mode 100644 index 017aee7e01..0000000000 --- a/server/events/vcs/testdata/github-commit-check-suites.json +++ /dev/null @@ -1,169 +0,0 @@ -{ - "total_count": 1, - "check_suites": [ - { - "id": 1234567890, - "node_id": "CS_kwDOHE7PYM8AAAAB2iIZfQ", - "head_branch": "atlantis-patch-2", - "head_sha": "4273e07c528292222f119a040079093bf1f11232", - "status": "queued", - "conclusion": null, - "url": "https://api.github.com/repos/octocat/Hello-World/check-suites/1234567890", - "before": "0000000000000000000000000000000000000000", - "after": "4273e07c528292222f119a040079093bf1f11232", - "pull_requests": [ - { - "url": "https://api.github.com/repos/octocat/Hello-World/pulls/1", - "id": 1035065545, - "number": 1, - "head": { - "ref": "atlantis-patch-2", - "sha": "4273e07c528292222f119a040079093bf1f11232", - "repo": { - "id": 474926944, - "url": "https://api.github.com/repos/octocat/Hello-World", - "name": "Hello-World" - } - }, - "base": { - "ref": "main", - "sha": "6f5744874b33ceb6a5c91edc91085991dbd1f61a", - "repo": { - "id": 474926944, - "url": "https://api.github.com/repos/octocat/Hello-World", - "name": "Hello-World" - } - } - } - ], - "app": { - "id": 184783, - "slug": "atlantis", - "node_id": "A_kwHOBbMkBs4AAtHP", - "owner": { - "login": "octocat", - "id": 95626246, - "node_id": "O_kgDOBbMkBg", - "avatar_url": "https://avatars.githubusercontent.com/u/95626246?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": "https://api.github.com/users/octocat/repos", - "events_url": "https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "Organization", - "site_admin": false - }, - "name": "atlantis", - 
"description": "", - "external_url": "https://atlantis.localhost/", - "html_url": "https://github.com/apps/atlantis", - "created_at": "2022-03-29T08:51:26Z", - "updated_at": "2022-07-01T11:35:37Z", - "permissions": { - "administration": "read", - "checks": "write", - "contents": "write", - "issues": "write", - "metadata": "read", - "pull_requests": "write", - "statuses": "write" - }, - "events": [] - }, - "created_at": "2022-08-24T07:06:21Z", - "updated_at": "2022-08-24T07:06:21Z", - "rerequestable": true, - "runs_rerequestable": true, - "latest_check_runs_count": 0, - "check_runs_url": "https://api.github.com/repos/octocat/Hello-World/check-suites/1234567890/check-runs", - "head_commit": { - "id": "4273e07c528292222f119a040079093bf1f11232", - "tree_id": "56781332464aabdfae51b7f37f72ffc6ce8ce54e", - "message": "test atlantis", - "timestamp": "2022-08-24T07:06:21Z", - "author": { - "name": "octocat", - "email": "octocat@noreply.github.com" - }, - "committer": { - "name": "GitHub", - "email": "noreply@github.com" - } - }, - "repository": { - "id": 474926944, - "node_id": "R_kgDOHE7PYA", - "name": "Hello-World", - "full_name": "octocat/Hello-World", - "private": true, - "owner": { - "login": "octocat", - "id": 95626246, - "node_id": "O_kgDOBbMkBg", - "avatar_url": "https://avatars.githubusercontent.com/u/95626246?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": "https://api.github.com/users/octocat/repos", - "events_url": "https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "Organization", - "site_admin": false - }, - "html_url": "https://github.com/octocat/Hello-World", - "description": null, - "fork": false, - "url": "https://api.github.com/repos/octocat/Hello-World", - "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks", - "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}", - "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}", - "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams", - "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks", - "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}", - "events_url": "https://api.github.com/repos/octocat/Hello-World/events", - "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}", - "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}", - "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags", - "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}", - "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}", - "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}", - "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}", - "statuses_url": 
"https://api.github.com/repos/octocat/Hello-World/statuses/{sha}", - "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages", - "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers", - "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors", - "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers", - "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription", - "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}", - "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}", - "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}", - "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}", - "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}", - "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}", - "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges", - "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}", - "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads", - "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}", - "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}", - "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}", - "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}", - "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}", - "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}", - "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments" - } - } - ] -} diff --git a/server/events/vcs/testdata/github-commit-status-full.json b/server/events/vcs/testdata/github-commit-status-full.json deleted file mode 100644 index a042101c40..0000000000 --- a/server/events/vcs/testdata/github-commit-status-full.json +++ /dev/null @@ -1,148 +0,0 @@ -{ - "state": "blocked", - "statuses": [ - { - "url": "https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e", - "avatar_url": "https://avatars.githubusercontent.com/u/583231?v=4", - "id": 16230299674, - "node_id": "SC_kwDOFRFvL88AAAADx2a4Gg", - "state": "success", - "description": "Plan succeeded.", - "target_url": "https://localhost/jobs/octocat/Hello-World/1/project1", - "context": "atlantis/plan: project1", - "created_at": "2022-02-10T15:26:01Z", - "updated_at": "2022-02-10T15:26:01Z" - }, - { - "url": "https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e", - "avatar_url": "https://avatars.githubusercontent.com/u/583231?v=4", - "id": 16230303174, - "node_id": "SC_kwDOFRFvL88AAAADx2bFxg", - "state": "success", - "description": "Plan succeeded.", - "target_url": "https://localhost/jobs/octocat/Hello-World/1/project2", - "context": "atlantis/plan: project2", - "created_at": "2022-02-10T15:26:12Z", - "updated_at": "2022-02-10T15:26:12Z" - }, - { - "url": "https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e", - "avatar_url": "https://avatars.githubusercontent.com/u/583231?v=4", - "id": 16230303679, - "node_id": "SC_kwDOFRFvL88AAAADx2bHvw", - "state": "success", - "description": "2/2 projects planned 
successfully.", - "target_url": "", - "context": "atlantis/plan", - "created_at": "2022-02-10T15:26:13Z", - "updated_at": "2022-02-10T15:26:13Z" - }, - { - "url": "https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e", - "avatar_url": "https://avatars.githubusercontent.com/u/583231?v=4", - "id": 16230307923, - "node_id": "SC_kwDOFRFvL88AAAADx2bYUw", - "state": "failure", - "description": "Apply failed.", - "target_url": "https://localhost/jobs/octocat/Hello-World/1/project1", - "context": "atlantis/apply: project1", - "created_at": "2022-02-10T15:26:27Z", - "updated_at": "2022-02-10T15:26:27Z" - }, - { - "url": "https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e", - "avatar_url": "https://avatars.githubusercontent.com/u/583231?v=4", - "id": 16230308153, - "node_id": "SC_kwDOFRFvL88AAAADx2bZOQ", - "state": "failure", - "description": "Apply failed.", - "target_url": "https://localhost/jobs/octocat/Hello-World/1/project2", - "context": "atlantis/apply: project2", - "created_at": "2022-02-10T15:26:27Z", - "updated_at": "2022-02-10T15:26:27Z" - }, - { - "url": "https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e", - "avatar_url": "https://avatars.githubusercontent.com/u/583231?v=4", - "id": 16230308528, - "node_id": "SC_kwDOFRFvL88AAAADx2basA", - "state": "failure", - "description": "0/2 projects applied successfully.", - "target_url": "", - "context": "atlantis/apply", - "created_at": "2022-02-10T15:26:28Z", - "updated_at": "2022-02-10T15:26:28Z" - } - ], - "sha": "6dcb09b5b57875f334f61aebed695e2e4193db5e", - "total_count": 0, - "repository": { - "id": 1296269, - "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5", - "name": "Hello-World", - "full_name": "octocat/Hello-World", - "private": false, - "owner": { - "login": "octocat", - "id": 583231, - "node_id": "MDQ6VXNlcjU4MzIzMQ==", - "avatar_url": "https://avatars.githubusercontent.com/u/583231?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": "https://api.github.com/users/octocat/repos", - "events_url": "https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "User", - "site_admin": false - }, - "html_url": "https://github.com/octocat/Hello-World", - "description": "My first repository on GitHub!", - "fork": false, - "url": "https://api.github.com/repos/octocat/Hello-World", - "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks", - "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}", - "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}", - "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams", - "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks", - "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}", - "events_url": 
"https://api.github.com/repos/octocat/Hello-World/events", - "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}", - "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}", - "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags", - "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}", - "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}", - "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}", - "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}", - "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}", - "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages", - "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers", - "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors", - "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers", - "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription", - "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}", - "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}", - "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}", - "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}", - "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}", - "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}", - "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges", - "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}", - "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads", - "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}", - "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}", - "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}", - "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}", - "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}", - "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}", - "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments" - }, - "commit_url": "https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e", - "url": "https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e/status" - } \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/branch-protection-expected.json b/server/events/vcs/testdata/github-pull-request-mergeability/branch-protection-expected.json new file mode 100644 index 0000000000..ad84fee1f3 --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/branch-protection-expected.json @@ -0,0 +1,58 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + }, + { + "context": "my-required-expected-check" + } + ] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [] 
+ } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/branch-protection-failed.json b/server/events/vcs/testdata/github-pull-request-mergeability/branch-protection-failed.json new file mode 100644 index 0000000000..8b1ee9c1b5 --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/branch-protection-failed.json @@ -0,0 +1,64 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + }, + { + "context": "my-required-expected-check" + } + ] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + }, + { + "__typename": "StatusContext", + "context": "my-required-expected-check", + "state": "FAILED", + "isRequired": true + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/branch-protection-passed.json b/server/events/vcs/testdata/github-pull-request-mergeability/branch-protection-passed.json new file mode 100644 index 0000000000..3dd40fcf2a --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/branch-protection-passed.json @@ -0,0 +1,64 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + }, + { + "context": "my-required-expected-check" + } + ] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + }, + { + "__typename": "StatusContext", + "context": "my-required-expected-check", + "state": "SUCCESS", + "isRequired": true + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/repository-id.json b/server/events/vcs/testdata/github-pull-request-mergeability/repository-id.json new file mode 100644 index 0000000000..9e6d02e114 --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/repository-id.json @@ -0,0 +1,7 @@ +{ + "data": { + "repository": { + "databaseId": 120519269 + } + } +} \ No newline at end 
of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-atlantis-apply-expected.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-atlantis-apply-expected.json new file mode 100644 index 0000000000..f83b126d4e --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-atlantis-apply-expected.json @@ -0,0 +1,52 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [] + } + } + } + } + ] + } + } + } + } +} diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-atlantis-apply-pending.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-atlantis-apply-pending.json new file mode 100644 index 0000000000..4ce1799bcd --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-atlantis-apply-pending.json @@ -0,0 +1,65 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-expected.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-expected.json new file mode 100644 index 0000000000..d0914e6395 --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-expected.json @@ -0,0 +1,68 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + }, + { + "context": "my-required-expected-check" + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + 
"__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-failed-other-atlantis.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-failed-other-atlantis.json new file mode 100644 index 0000000000..38d291aa74 --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-failed-other-atlantis.json @@ -0,0 +1,80 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + }, + { + "context": "other-atlantis/apply" + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + }, + { + "__typename": "StatusContext", + "context": "other-atlantis/apply", + "state": "FAILED", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "other-atlantis/apply", + "state": "SUCCESS", + "isRequired": false + } + ] + } + } + } + } + ] + } + } + } + } +} diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-failed.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-failed.json new file mode 100644 index 0000000000..6bc4f55c25 --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-failed.json @@ -0,0 +1,74 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + }, + { + "context": "my-required-expected-check" + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + }, + { + "__typename": "StatusContext", + "context": "my-required-expected-check", + "state": "FAILED", + "isRequired": true + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-neutral.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-neutral.json new file mode 100644 index 0000000000..f6b3183780 --- /dev/null +++ 
b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-neutral.json @@ -0,0 +1,74 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + }, + { + "context": "my-required-expected-check" + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + }, + { + "__typename": "CheckRun", + "name": "my-required-expected-check", + "conclusion": "NEUTRAL", + "isRequired": true + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-passed.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-passed.json new file mode 100644 index 0000000000..95b7b5350c --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-passed.json @@ -0,0 +1,74 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + }, + { + "context": "my-required-expected-check" + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + }, + { + "__typename": "StatusContext", + "context": "my-required-expected-check", + "state": "SUCCESS", + "isRequired": true + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-pending-other-atlantis.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-pending-other-atlantis.json new file mode 100644 index 0000000000..f0936309e0 --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-pending-other-atlantis.json @@ -0,0 +1,80 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + }, + { + "context": "other-atlantis/apply" + 
} + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + }, + { + "__typename": "StatusContext", + "context": "other-atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "other-atlantis/plan", + "state": "PENDING", + "isRequired": false + } + ] + } + } + } + } + ] + } + } + } + } +} diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-pending.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-pending.json new file mode 100644 index 0000000000..60d70332de --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-pending.json @@ -0,0 +1,74 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + }, + { + "context": "my-required-expected-check" + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + }, + { + "__typename": "StatusContext", + "context": "my-required-expected-check", + "state": "PENDING", + "isRequired": true + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-skipped.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-skipped.json new file mode 100644 index 0000000000..8427088fe0 --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-check-skipped.json @@ -0,0 +1,74 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + }, + { + "context": "my-required-expected-check" + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + }, + { + "__typename": "CheckRun", + "name": "my-required-expected-check", + "conclusion": "SKIPPED", + 
"isRequired": true + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-evaluate-workflow-failed.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-evaluate-workflow-failed.json new file mode 100644 index 0000000000..ddde1d8ac5 --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-evaluate-workflow-failed.json @@ -0,0 +1,95 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + } + ] + } + }, + { + "type": "WORKFLOWS", + "repositoryRuleset": { + "enforcement": "EVALUATE" + }, + "parameters": { + "workflows": [ + { + "path": ".github/workflows/my-required-workflow.yaml", + "repositoryId": 120519269, + "sha": null + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + }, + { + "__typename": "CheckRun", + "name": "my required (evaluate-enforcement) check", + "conclusion": "FAILURE", + "isRequired": true, + "checkSuite": { + "workflowRun": { + "file": { + "path": ".github/workflows/my-required-workflow.yaml", + "repositoryFileUrl": "https://github.com/runatlantis/atlantis/blob/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.github/workflows/my-required-workflow.yaml", + "repositoryName": "runatlantis/atlantis" + } + } + } + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-optional-check-failed.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-optional-check-failed.json new file mode 100644 index 0000000000..6368cbe69e --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-optional-check-failed.json @@ -0,0 +1,71 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + }, + { + "__typename": "CheckRun", + "name": "my-optional-check", + "conclusion": "FAILURE", + "isRequired": false + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git 
a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-optional-status-failed.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-optional-status-failed.json new file mode 100644 index 0000000000..111ff4337a --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-optional-status-failed.json @@ -0,0 +1,71 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + }, + { + "__typename": "StatusContext", + "context": "my-optional-check", + "state": "FAILURE", + "isRequired": false + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-workflow-expected.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-workflow-expected.json new file mode 100644 index 0000000000..9b21af9390 --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-workflow-expected.json @@ -0,0 +1,80 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + } + ] + } + }, + { + "type": "WORKFLOWS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "workflows": [ + { + "path": ".github/workflows/my-required-workflow.yaml", + "repositoryId": 120519269, + "sha": null + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-workflow-failed.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-workflow-failed.json new file mode 100644 index 0000000000..570fdb9276 --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-workflow-failed.json @@ -0,0 +1,95 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + 
"hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + } + ] + } + }, + { + "type": "WORKFLOWS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "workflows": [ + { + "path": ".github/workflows/my-required-workflow.yaml", + "repositoryId": 120519269, + "sha": null + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + }, + { + "__typename": "CheckRun", + "name": "my required check", + "conclusion": "FAILURE", + "isRequired": true, + "checkSuite": { + "workflowRun": { + "file": { + "path": ".github/workflows/my-required-workflow.yaml", + "repositoryFileUrl": "https://github.com/runatlantis/atlantis/blob/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.github/workflows/my-required-workflow.yaml", + "repositoryName": "runatlantis/atlantis" + } + } + } + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-workflow-passed-sha-match.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-workflow-passed-sha-match.json new file mode 100644 index 0000000000..e40b014ab3 --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-workflow-passed-sha-match.json @@ -0,0 +1,95 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + } + ] + } + }, + { + "type": "WORKFLOWS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "workflows": [ + { + "path": ".github/workflows/my-required-workflow.yaml", + "repositoryId": 120519269, + "sha": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + }, + { + "__typename": "CheckRun", + "name": "my required check", + "conclusion": "SUCCESS", + "isRequired": true, + "checkSuite": { + "workflowRun": { + "file": { + "path": ".github/workflows/my-required-workflow.yaml", + "repositoryFileUrl": "https://github.com/runatlantis/atlantis/blob/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.github/workflows/my-required-workflow.yaml", + "repositoryName": "runatlantis/atlantis" + } + } + } + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-workflow-passed-sha-mismatch.json 
b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-workflow-passed-sha-mismatch.json new file mode 100644 index 0000000000..5a79ce3e33 --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-workflow-passed-sha-mismatch.json @@ -0,0 +1,95 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + } + ] + } + }, + { + "type": "WORKFLOWS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "workflows": [ + { + "path": ".github/workflows/my-required-workflow.yaml", + "repositoryId": 120519269, + "sha": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + }, + { + "__typename": "CheckRun", + "name": "my required check", + "conclusion": "SUCCESS", + "isRequired": true, + "checkSuite": { + "workflowRun": { + "file": { + "path": ".github/workflows/my-required-workflow.yaml", + "repositoryFileUrl": "https://github.com/runatlantis/atlantis/blob/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/.github/workflows/my-required-workflow.yaml", + "repositoryName": "runatlantis/atlantis" + } + } + } + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-workflow-passed.json b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-workflow-passed.json new file mode 100644 index 0000000000..6a88b4c8c5 --- /dev/null +++ b/server/events/vcs/testdata/github-pull-request-mergeability/ruleset-workflow-passed.json @@ -0,0 +1,95 @@ +{ + "data": { + "repository": { + "pullRequest": { + "reviewDecision": null, + "baseRef": { + "branchProtectionRule": { + "requiredStatusChecks": [] + }, + "rules": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "type": "REQUIRED_STATUS_CHECKS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "requiredStatusChecks": [ + { + "context": "atlantis/apply" + } + ] + } + }, + { + "type": "WORKFLOWS", + "repositoryRuleset": { + "enforcement": "ACTIVE" + }, + "parameters": { + "workflows": [ + { + "path": ".github/workflows/my-required-workflow.yaml", + "repositoryId": 120519269, + "sha": null + } + ] + } + } + ] + } + }, + "commits": { + "nodes": [ + { + "commit": { + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "endCursor": "QWERTY", + "hasNextPage": false + }, + "nodes": [ + { + "__typename": "StatusContext", + "context": "atlantis/apply", + "state": "PENDING", + "isRequired": true + }, + { + "__typename": "StatusContext", + "context": "atlantis/plan", + "state": "SUCCESS", + "isRequired": false + }, + { + "__typename": "CheckRun", + "name": "my required check", + "conclusion": "SUCCESS", + "isRequired": true, + "checkSuite": { + "workflowRun": { + "file": { 
+ "path": ".github/workflows/my-required-workflow.yaml", + "repositoryFileUrl": "https://github.com/runatlantis/atlantis/blob/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.github/workflows/my-required-workflow.yaml", + "repositoryName": "runatlantis/atlantis" + } + } + } + } + ] + } + } + } + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/server/events/vcs/testdata/gitlab-detailed-merge-status-need-rebase.json b/server/events/vcs/testdata/gitlab-detailed-merge-status-need-rebase.json new file mode 100644 index 0000000000..a37f0e8577 --- /dev/null +++ b/server/events/vcs/testdata/gitlab-detailed-merge-status-need-rebase.json @@ -0,0 +1,124 @@ +{ + "id": 22461274, + "iid": 13, + "project_id": 4580910, + "title": "Update main.tf", + "description": "", + "state": "opened", + "created_at": "2019-01-15T18:27:29.375Z", + "updated_at": "2019-01-25T17:28:01.437Z", + "merged_by": null, + "merged_at": null, + "closed_by": null, + "closed_at": null, + "target_branch": "patch-1", + "source_branch": "patch-1-merger", + "user_notes_count": 0, + "upvotes": 0, + "downvotes": 0, + "author": { + "id": 1755902, + "name": "Luke Kysow", + "username": "lkysow", + "state": "active", + "avatar_url": "https://secure.gravatar.com/avatar/25fd57e71590fe28736624ff24d41c5f?s=80&d=identicon", + "web_url": "https://gitlab.com/lkysow" + }, + "assignee": null, + "reviewers": [], + "source_project_id": 4580910, + "target_project_id": 4580910, + "labels": [], + "work_in_progress": false, + "milestone": null, + "merge_when_pipeline_succeeds": false, + "merge_status": "can_be_merged", + "detailed_merge_status": "need_rebase", + "sha": "cb86d70f464632bdfbe1bb9bc0f2f9d847a774a0", + "merge_commit_sha": null, + "squash_commit_sha": null, + "discussion_locked": null, + "should_remove_source_branch": null, + "force_remove_source_branch": true, + "reference": "!13", + "references": { + "short": "!13", + "relative": "!13", + "full": "lkysow/atlantis-example!13" + }, + "web_url": "https://gitlab.com/lkysow/atlantis-example/merge_requests/13", + "time_stats": { + "time_estimate": 0, + "total_time_spent": 0, + "human_time_estimate": null, + "human_total_time_spent": null + }, + "squash": true, + "task_completion_status": { + "count": 0, + "completed_count": 0 + }, + "has_conflicts": false, + "blocking_discussions_resolved": true, + "approvals_before_merge": null, + "subscribed": false, + "changes_count": "1", + "latest_build_started_at": "2019-01-15T18:27:29.375Z", + "latest_build_finished_at": "2019-01-25T17:28:01.437Z", + "first_deployed_to_production_at": null, + "pipeline": { + "id": 488598, + "sha": "67cb91d3f6198189f433c045154a885784ba6977", + "ref": "patch-1-merger", + "status": "success", + "created_at": "2019-01-15T18:27:29.375Z", + "updated_at": "2019-01-25T17:28:01.437Z", + "web_url": "https://gitlab.com/lkysow/atlantis-example/-/pipelines/488598" + }, + "head_pipeline": { + "id": 488598, + "sha": "67cb91d3f6198189f433c045154a885784ba6977", + "ref": "patch-1-merger", + "status": "success", + "created_at": "2019-01-15T18:27:29.375Z", + "updated_at": "2019-01-25T17:28:01.437Z", + "web_url": "https://gitlab.com/lkysow/atlantis-example/-/pipelines/488598", + "before_sha": "0000000000000000000000000000000000000000", + "tag": false, + "yaml_errors": null, + "user": { + "id": 1755902, + "name": "Luke Kysow", + "username": "lkysow", + "state": "active", + "avatar_url": "https://secure.gravatar.com/avatar/25fd57e71590fe28736624ff24d41c5f?s=80&d=identicon", + "web_url": "https://gitlab.com/lkysow" + }, + "started_at": 
"2019-01-15T18:27:29.375Z", + "finished_at": "2019-01-25T17:28:01.437Z", + "committed_at": null, + "duration": 31, + "coverage": null, + "detailed_status": { + "icon": "status_success", + "text": "passed", + "label": "passed", + "group": "success", + "tooltip": "passed", + "has_details": true, + "details_path": "/lkysow/atlantis-example/-/pipelines/488598", + "illustration": null, + "favicon": "/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png" + } + }, + "diff_refs": { + "base_sha": "67cb91d3f6198189f433c045154a885784ba6977", + "head_sha": "cb86d70f464632bdfbe1bb9bc0f2f9d847a774a0", + "start_sha": "67cb91d3f6198189f433c045154a885784ba6977" + }, + "merge_error": null, + "first_contribution": false, + "user": { + "can_merge": true + } +} diff --git a/server/events/working_dir.go b/server/events/working_dir.go index 886b3c4b40..c2e56d8dc7 100644 --- a/server/events/working_dir.go +++ b/server/events/working_dir.go @@ -26,6 +26,7 @@ import ( "github.com/runatlantis/atlantis/server/core/runtime" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/logging" + "github.com/runatlantis/atlantis/server/utils" ) const workingDirPrefix = "repos" @@ -41,23 +42,23 @@ type WorkingDir interface { // absolute path to the root of the cloned repo. It also returns // a boolean indicating if we should warn users that the branch we're // merging into has been updated since we cloned it. - Clone(headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) + Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) // GetWorkingDir returns the path to the workspace for this repo and pull. // If workspace does not exist on disk, error will be of type os.IsNotExist. GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) (string, error) - HasDiverged(cloneDir string) bool + HasDiverged(logger logging.SimpleLogging, cloneDir string) bool GetPullDir(r models.Repo, p models.PullRequest) (string, error) // Delete deletes the workspace for this repo and pull. - Delete(r models.Repo, p models.PullRequest) error - DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error + Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error + DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) error // Set a flag in the workingdir so Clone() can know that it is safe to re-clone the workingdir if // the upstream branch has been modified. This is only safe after grabbing the project lock // and before running any plans SetCheckForUpstreamChanges() // DeletePlan deletes the plan for this repo, pull, workspace path and project name - DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error + DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error // GetGitUntrackedFiles returns a list of Git untracked files in the working dir. - GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) ([]string, error) + GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) ([]string, error) } // FileWorkspace implements WorkingDir with the file system. 
@@ -86,7 +87,6 @@ type FileWorkspace struct { GpgNoSigningEnabled bool // flag indicating if we have to merge with potential new changes upstream (directly after grabbing project lock) CheckForUpstreamChanges bool - Logger logging.SimpleLogging } // Clone git clones headRepo, checks out the branch and then returns the absolute @@ -95,10 +95,7 @@ type FileWorkspace struct { // If the repo already exists and is at // the right commit it does nothing. This is to support running commands in // multiple dirs of the same repo without deleting existing plans. -func (w *FileWorkspace) Clone( - headRepo models.Repo, - p models.PullRequest, - workspace string) (string, bool, error) { +func (w *FileWorkspace) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { cloneDir := w.cloneDir(p.BaseRepo, p, workspace) defer func() { w.CheckForUpstreamChanges = false }() @@ -106,7 +103,7 @@ func (w *FileWorkspace) Clone( // If the directory already exists, check if it's at the right commit. // If so, then we do nothing. if _, err := os.Stat(cloneDir); err == nil { - w.Logger.Debug("clone directory %q already exists, checking if it's at the right commit", cloneDir) + logger.Debug("clone directory '%s' already exists, checking if it's at the right commit", cloneDir) // We use git rev-parse to see if our repo is at the right commit. // If just checking out the pull request branch, we can use HEAD. @@ -121,28 +118,28 @@ func (w *FileWorkspace) Clone( revParseCmd.Dir = cloneDir outputRevParseCmd, err := revParseCmd.CombinedOutput() if err != nil { - w.Logger.Warn("will re-clone repo, could not determine if was at correct commit: %s: %s: %s", strings.Join(revParseCmd.Args, " "), err, string(outputRevParseCmd)) - return cloneDir, false, w.forceClone(c) + logger.Warn("will re-clone repo, could not determine if was at correct commit: %s: %s: %s", strings.Join(revParseCmd.Args, " "), err, string(outputRevParseCmd)) + return cloneDir, false, w.forceClone(logger, c) } currCommit := strings.Trim(string(outputRevParseCmd), "\n") // We're prefix matching here because BitBucket doesn't give us the full // commit, only a 12 character prefix. if strings.HasPrefix(currCommit, p.HeadCommit) { - if w.CheckForUpstreamChanges && w.CheckoutMerge && w.recheckDiverged(p, headRepo, cloneDir) { - w.Logger.Info("base branch has been updated, using merge strategy and will clone again") - return cloneDir, true, w.mergeAgain(c) + if w.CheckForUpstreamChanges && w.CheckoutMerge && w.recheckDiverged(logger, p, headRepo, cloneDir) { + logger.Info("base branch has been updated, using merge strategy and will clone again") + return cloneDir, true, w.mergeAgain(logger, c) } - w.Logger.Debug("repo is at correct commit %q so will not re-clone", p.HeadCommit) + logger.Debug("repo is at correct commit '%s' so will not re-clone", p.HeadCommit) return cloneDir, false, nil } else { - w.Logger.Debug("repo was already cloned but is not at correct commit, wanted %q got %q", p.HeadCommit, currCommit) + logger.Debug("repo was already cloned but is not at correct commit, wanted '%s' got '%s'", p.HeadCommit, currCommit) } // We'll fall through to re-clone. } // Otherwise we clone the repo. - return cloneDir, false, w.forceClone(c) + return cloneDir, false, w.forceClone(logger, c) } // recheckDiverged returns true if the branch we're merging into has diverged @@ -152,7 +149,7 @@ func (w *FileWorkspace) Clone( // and we have to perform a new merge. 
// If there are any errors we return false since we prefer things to succeed // vs. stopping the plan/apply. -func (w *FileWorkspace) recheckDiverged(p models.PullRequest, headRepo models.Repo, cloneDir string) bool { +func (w *FileWorkspace) recheckDiverged(logger logging.SimpleLogging, p models.PullRequest, headRepo models.Repo, cloneDir string) bool { if !w.CheckoutMerge { // It only makes sense to warn that main has diverged if we're using // the checkout merge strategy. If we're just checking out the branch, @@ -183,17 +180,16 @@ func (w *FileWorkspace) recheckDiverged(p models.PullRequest, headRepo models.Re cmd.Dir = cloneDir output, err := cmd.CombinedOutput() - if err != nil { - w.Logger.Warn("getting remote update failed: %s", string(output)) + logger.Warn("getting remote update failed: %s", string(output)) return false } } - return w.HasDiverged(cloneDir) + return w.HasDiverged(logger, cloneDir) } -func (w *FileWorkspace) HasDiverged(cloneDir string) bool { +func (w *FileWorkspace) HasDiverged(logger logging.SimpleLogging, cloneDir string) bool { if !w.CheckoutMerge { // Both the diverged warning and the UnDiverged apply requirement only apply to merge checkout strategy so // we assume false here for 'branch' strategy. @@ -204,7 +200,7 @@ func (w *FileWorkspace) HasDiverged(cloneDir string) bool { statusFetchCmd.Dir = cloneDir outputStatusFetch, err := statusFetchCmd.CombinedOutput() if err != nil { - w.Logger.Warn("fetching repo has failed: %s", string(outputStatusFetch)) + logger.Warn("fetching repo has failed: %s", string(outputStatusFetch)) return false } @@ -213,14 +209,14 @@ func (w *FileWorkspace) HasDiverged(cloneDir string) bool { statusUnoCmd.Dir = cloneDir outputStatusUno, err := statusUnoCmd.CombinedOutput() if err != nil { - w.Logger.Warn("getting repo status has failed: %s", string(outputStatusUno)) + logger.Warn("getting repo status has failed: %s", string(outputStatusUno)) return false } hasDiverged := strings.Contains(string(outputStatusUno), "have diverged") return hasDiverged } -func (w *FileWorkspace) forceClone(c wrappedGitContext) error { +func (w *FileWorkspace) forceClone(logger logging.SimpleLogging, c wrappedGitContext) error { value, _ := cloneLocks.LoadOrStore(c.dir, new(sync.Mutex)) mutex := value.(*sync.Mutex) @@ -232,11 +228,11 @@ func (w *FileWorkspace) forceClone(c wrappedGitContext) error { err := os.RemoveAll(c.dir) if err != nil { - return errors.Wrapf(err, "deleting dir %q before cloning", c.dir) + return errors.Wrapf(err, "deleting dir '%s' before cloning", c.dir) } // Create the directory and parents if necessary. - w.Logger.Info("creating dir %q", c.dir) + logger.Info("creating dir '%s'", c.dir) if err := os.MkdirAll(c.dir, 0700); err != nil { return errors.Wrap(err, "creating new workspace") } @@ -253,37 +249,37 @@ func (w *FileWorkspace) forceClone(c wrappedGitContext) error { // if branch strategy, use depth=1 if !w.CheckoutMerge { - return w.wrappedGit(c, "clone", "--depth=1", "--branch", c.pr.HeadBranch, "--single-branch", headCloneURL, c.dir) + return w.wrappedGit(logger, c, "clone", "--depth=1", "--branch", c.pr.HeadBranch, "--single-branch", headCloneURL, c.dir) } // if merge strategy... 
// if no checkout depth, omit depth arg if w.CheckoutDepth == 0 { - if err := w.wrappedGit(c, "clone", "--branch", c.pr.BaseBranch, "--single-branch", baseCloneURL, c.dir); err != nil { + if err := w.wrappedGit(logger, c, "clone", "--branch", c.pr.BaseBranch, "--single-branch", baseCloneURL, c.dir); err != nil { return err } } else { - if err := w.wrappedGit(c, "clone", "--depth", fmt.Sprint(w.CheckoutDepth), "--branch", c.pr.BaseBranch, "--single-branch", baseCloneURL, c.dir); err != nil { + if err := w.wrappedGit(logger, c, "clone", "--depth", fmt.Sprint(w.CheckoutDepth), "--branch", c.pr.BaseBranch, "--single-branch", baseCloneURL, c.dir); err != nil { return err } } - if err := w.wrappedGit(c, "remote", "add", "head", headCloneURL); err != nil { + if err := w.wrappedGit(logger, c, "remote", "add", "head", headCloneURL); err != nil { return err } if w.GpgNoSigningEnabled { - if err := w.wrappedGit(c, "config", "--local", "commit.gpgsign", "false"); err != nil { + if err := w.wrappedGit(logger, c, "config", "--local", "commit.gpgsign", "false"); err != nil { return err } } - return w.mergeToBaseBranch(c) + return w.mergeToBaseBranch(logger, c) } // There is a new upstream update that we need, and we want to update to it // without deleting any existing plans -func (w *FileWorkspace) mergeAgain(c wrappedGitContext) error { +func (w *FileWorkspace) mergeAgain(logger logging.SimpleLogging, c wrappedGitContext) error { value, _ := cloneLocks.LoadOrStore(c.dir, new(sync.Mutex)) mutex := value.(*sync.Mutex) @@ -294,11 +290,11 @@ func (w *FileWorkspace) mergeAgain(c wrappedGitContext) error { } // Reset branch as if it was cloned again - if err := w.wrappedGit(c, "reset", "--hard", fmt.Sprintf("refs/remotes/origin/%s", c.pr.BaseBranch)); err != nil { + if err := w.wrappedGit(logger, c, "reset", "--hard", fmt.Sprintf("refs/remotes/origin/%s", c.pr.BaseBranch)); err != nil { return err } - return w.mergeToBaseBranch(c) + return w.mergeToBaseBranch(logger, c) } // wrappedGitContext is the configuration for wrappedGit that is typically unchanged @@ -311,7 +307,7 @@ type wrappedGitContext struct { // wrappedGit runs git with additional environment settings required for git merge, // and with sanitized error logging to avoid leaking git credentials -func (w *FileWorkspace) wrappedGit(c wrappedGitContext, args ...string) error { +func (w *FileWorkspace) wrappedGit(logger logging.SimpleLogging, c wrappedGitContext, args ...string) error { cmd := exec.Command("git", args...) // nolint: gosec cmd.Dir = c.dir // The git merge command requires these env vars are set. @@ -327,12 +323,12 @@ func (w *FileWorkspace) wrappedGit(c wrappedGitContext, args ...string) error { sanitizedErrMsg := w.sanitizeGitCredentials(err.Error(), c.pr.BaseRepo, c.head) return fmt.Errorf("running %s: %s: %s", cmdStr, sanitizedOutput, sanitizedErrMsg) } - w.Logger.Debug("ran: %s. Output: %s", cmdStr, strings.TrimSuffix(sanitizedOutput, "\n")) + logger.Debug("ran: %s. Output: %s", cmdStr, strings.TrimSuffix(sanitizedOutput, "\n")) return nil } // Merge the PR into the base branch. 
-func (w *FileWorkspace) mergeToBaseBranch(c wrappedGitContext) error { +func (w *FileWorkspace) mergeToBaseBranch(logger logging.SimpleLogging, c wrappedGitContext) error { fetchRef := fmt.Sprintf("+refs/heads/%s:", c.pr.HeadBranch) fetchRemote := "head" if w.GithubAppEnabled { @@ -342,19 +338,19 @@ func (w *FileWorkspace) mergeToBaseBranch(c wrappedGitContext) error { // if no checkout depth, omit depth arg if w.CheckoutDepth == 0 { - if err := w.wrappedGit(c, "fetch", fetchRemote, fetchRef); err != nil { + if err := w.wrappedGit(logger, c, "fetch", fetchRemote, fetchRef); err != nil { return err } } else { - if err := w.wrappedGit(c, "fetch", "--depth", fmt.Sprint(w.CheckoutDepth), fetchRemote, fetchRef); err != nil { + if err := w.wrappedGit(logger, c, "fetch", "--depth", fmt.Sprint(w.CheckoutDepth), fetchRemote, fetchRef); err != nil { return err } } - if err := w.wrappedGit(c, "merge-base", c.pr.BaseBranch, "FETCH_HEAD"); err != nil { + if err := w.wrappedGit(logger, c, "merge-base", c.pr.BaseBranch, "FETCH_HEAD"); err != nil { // git merge-base returning error means that we did not receive enough commits in shallow clone. // Fall back to retrieving full repo history. - if err := w.wrappedGit(c, "fetch", "--unshallow"); err != nil { + if err := w.wrappedGit(logger, c, "fetch", "--unshallow"); err != nil { return err } } @@ -365,7 +361,7 @@ func (w *FileWorkspace) mergeToBaseBranch(c wrappedGitContext) error { // git rev-parse HEAD^2 to get the head commit because it will // always succeed whereas without --no-ff, if the merge was fast // forwarded then git rev-parse HEAD^2 would fail. - return w.wrappedGit(c, "merge", "-q", "--no-ff", "-m", "atlantis-merge", "FETCH_HEAD") + return w.wrappedGit(logger, c, "merge", "-q", "--no-ff", "-m", "atlantis-merge", "FETCH_HEAD") } // GetWorkingDir returns the path to the workspace for this repo and pull. @@ -388,16 +384,16 @@ func (w *FileWorkspace) GetPullDir(r models.Repo, p models.PullRequest) (string, } // Delete deletes the workspace for this repo and pull. -func (w *FileWorkspace) Delete(r models.Repo, p models.PullRequest) error { +func (w *FileWorkspace) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error { repoPullDir := w.repoPullDir(r, p) - w.Logger.Info("Deleting repo pull directory: " + repoPullDir) + logger.Info("Deleting repo pull directory: " + repoPullDir) return os.RemoveAll(repoPullDir) } // DeleteForWorkspace deletes the working dir for this workspace. 
-func (w *FileWorkspace) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error { +func (w *FileWorkspace) DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) error { workspaceDir := w.cloneDir(r, p, workspace) - w.Logger.Info("Deleting workspace directory: " + workspaceDir) + logger.Info("Deleting workspace directory: " + workspaceDir) return os.RemoveAll(workspaceDir) } @@ -421,20 +417,20 @@ func (w *FileWorkspace) SetCheckForUpstreamChanges() { w.CheckForUpstreamChanges = true } -func (w *FileWorkspace) DeletePlan(r models.Repo, p models.PullRequest, workspace string, projectPath string, projectName string) error { +func (w *FileWorkspace) DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, projectPath string, projectName string) error { planPath := filepath.Join(w.cloneDir(r, p, workspace), projectPath, runtime.GetPlanFilename(workspace, projectName)) - w.Logger.Info("Deleting plan: " + planPath) - return os.Remove(planPath) + logger.Info("Deleting plan: " + planPath) + return utils.RemoveIgnoreNonExistent(planPath) } // getGitUntrackedFiles returns a list of Git untracked files in the working dir. -func (w *FileWorkspace) GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) ([]string, error) { +func (w *FileWorkspace) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) ([]string, error) { workingDir, err := w.GetWorkingDir(r, p, workspace) if err != nil { return nil, err } - w.Logger.Debug("Checking for Git untracked files in directory: '%s'", workingDir) + logger.Debug("Checking for Git untracked files in directory: '%s'", workingDir) cmd := exec.Command("git", "ls-files", "--others", "--exclude-standard") cmd.Dir = workingDir @@ -444,6 +440,6 @@ func (w *FileWorkspace) GetGitUntrackedFiles(r models.Repo, p models.PullRequest } untrackedFiles := strings.Split(string(output), "\n")[:] - w.Logger.Debug("Untracked files: '%s'", strings.Join(untrackedFiles, ",")) + logger.Debug("Untracked files: '%s'", strings.Join(untrackedFiles, ",")) return untrackedFiles, nil } diff --git a/server/events/working_dir_test.go b/server/events/working_dir_test.go index f277c12e6b..e25c420100 100644 --- a/server/events/working_dir_test.go +++ b/server/events/working_dir_test.go @@ -43,10 +43,9 @@ func TestClone_NoneExisting(t *testing.T) { CheckoutMerge: false, TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir), GpgNoSigningEnabled: true, - Logger: logger, } - cloneDir, _, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, _, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", }, "default") @@ -96,10 +95,9 @@ func TestClone_CheckoutMergeNoneExisting(t *testing.T) { TestingOverrideHeadCloneURL: overrideURL, TestingOverrideBaseCloneURL: overrideURL, GpgNoSigningEnabled: true, - Logger: logger, } - cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", @@ -148,10 +146,9 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) { TestingOverrideHeadCloneURL: overrideURL, TestingOverrideBaseCloneURL: overrideURL, GpgNoSigningEnabled: true, - Logger: logger, } - _, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ + _, mergedAgain, err := wd.Clone(logger, 
models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", @@ -163,7 +160,7 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) { runCmd(t, dataDir, "touch", "repos/0/default/proof") // Now run the clone again. - cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", @@ -201,10 +198,9 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) { TestingOverrideHeadCloneURL: overrideURL, TestingOverrideBaseCloneURL: overrideURL, GpgNoSigningEnabled: true, - Logger: logger, } - _, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ + _, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", @@ -216,7 +212,7 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) { runCmd(t, dataDir, "touch", "repos/0/default/proof") // Now run the clone again. - cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", @@ -259,10 +255,9 @@ func TestClone_CheckoutMergeConflict(t *testing.T) { TestingOverrideHeadCloneURL: overrideURL, TestingOverrideBaseCloneURL: overrideURL, GpgNoSigningEnabled: true, - Logger: logger, } - _, _, err := wd.Clone(models.Repo{}, models.PullRequest{ + _, _, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", @@ -319,10 +314,9 @@ func TestClone_CheckoutMergeShallow(t *testing.T) { TestingOverrideHeadCloneURL: overrideURL, TestingOverrideBaseCloneURL: overrideURL, GpgNoSigningEnabled: true, - Logger: logger, } - cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", @@ -350,10 +344,9 @@ func TestClone_CheckoutMergeShallow(t *testing.T) { TestingOverrideHeadCloneURL: overrideURL, TestingOverrideBaseCloneURL: overrideURL, GpgNoSigningEnabled: true, - Logger: logger, } - cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", @@ -387,9 +380,8 @@ func TestClone_NoReclone(t *testing.T) { CheckoutMerge: false, TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir), GpgNoSigningEnabled: true, - Logger: logger, } - cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", }, "default") @@ -432,9 +424,8 @@ func TestClone_RecloneWrongCommit(t *testing.T) { CheckoutMerge: false, TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir), GpgNoSigningEnabled: true, - Logger: logger, } - cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", HeadCommit: expCommit, @@ -506,7 +497,6 @@ func TestClone_MasterHasDiverged(t *testing.T) { CheckoutMerge: false, CheckoutDepth: 50, GpgNoSigningEnabled: true, - 
Logger: logger, } // Pretend terraform has created a plan file, we'll check for it later @@ -518,7 +508,7 @@ func TestClone_MasterHasDiverged(t *testing.T) { // Run the clone without the checkout merge strategy. It should return // false for mergedAgain - _, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ + _, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "second-pr", BaseBranch: "main", @@ -532,7 +522,7 @@ func TestClone_MasterHasDiverged(t *testing.T) { // Run the clone twice with the merge strategy, the first run should // return true for mergedAgain, subsequent runs should // return false since the first call is supposed to merge. - _, mergedAgain, err = wd.Clone(models.Repo{CloneURL: repoDir}, models.PullRequest{ + _, mergedAgain, err = wd.Clone(logger, models.Repo{CloneURL: repoDir}, models.PullRequest{ BaseRepo: models.Repo{CloneURL: repoDir}, HeadBranch: "second-pr", BaseBranch: "main", @@ -542,7 +532,7 @@ func TestClone_MasterHasDiverged(t *testing.T) { Assert(t, mergedAgain == true, "First clone with CheckoutMerge=true with diverged base should have merged") wd.SetCheckForUpstreamChanges() - _, mergedAgain, err = wd.Clone(models.Repo{CloneURL: repoDir}, models.PullRequest{ + _, mergedAgain, err = wd.Clone(logger, models.Repo{CloneURL: repoDir}, models.PullRequest{ BaseRepo: models.Repo{CloneURL: repoDir}, HeadBranch: "second-pr", BaseBranch: "main", @@ -610,15 +600,14 @@ func TestHasDiverged_MasterHasDiverged(t *testing.T) { CheckoutMerge: true, CheckoutDepth: 50, GpgNoSigningEnabled: true, - Logger: logger, } - hasDiverged := wd.HasDiverged(repoDir + "/repos/0/default") + hasDiverged := wd.HasDiverged(logger, repoDir+"/repos/0/default") Equals(t, hasDiverged, true) // Run it again but without the checkout merge strategy. It should return // false. wd.CheckoutMerge = false - hasDiverged = wd.HasDiverged(repoDir + "/repos/0/default") + hasDiverged = wd.HasDiverged(logger, repoDir+"/repos/0/default") Equals(t, hasDiverged, false) } diff --git a/server/logging/simple_logger.go b/server/logging/simple_logger.go index e7d18e5654..5003a1fda0 100644 --- a/server/logging/simple_logger.go +++ b/server/logging/simple_logger.go @@ -19,7 +19,6 @@ package logging import ( "bytes" "fmt" - "testing" "github.com/pkg/errors" "go.uber.org/zap" @@ -184,7 +183,7 @@ func (l *StructuredLogger) saveToHistory(lvl LogLevel, format string, a ...inter // NewNoopLogger creates a logger instance that discards all logs and never // writes them. Used for testing. -func NewNoopLogger(t *testing.T) SimpleLogging { +func NewNoopLogger(t zaptest.TestingT) SimpleLogging { level := zap.DebugLevel return &StructuredLogger{ z: zaptest.NewLogger(t, zaptest.Level(level)).Sugar(), diff --git a/server/router_test.go b/server/router_test.go index 4b683e07cc..02cb51668b 100644 --- a/server/router_test.go +++ b/server/router_test.go @@ -11,7 +11,7 @@ import ( "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" . 
"github.com/runatlantis/atlantis/testing" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestRouter_GenerateLockURL(t *testing.T) { @@ -107,6 +107,6 @@ func TestGenerateProjectJobURL_ShouldReturnErrorWhenJobIDNotSpecified(t *testing } expectedErrString := "no job id in ctx" gotURL, err := router.GenerateProjectJobURL(ctx) - assert.EqualError(t, err, expectedErrString) + require.EqualError(t, err, expectedErrString) Equals(t, "", gotURL) } diff --git a/server/server.go b/server/server.go index eeab9d732e..39c5ae1bc7 100644 --- a/server/server.go +++ b/server/server.go @@ -50,7 +50,7 @@ import ( "github.com/pkg/errors" "github.com/runatlantis/atlantis/server/controllers" events_controllers "github.com/runatlantis/atlantis/server/controllers/events" - "github.com/runatlantis/atlantis/server/controllers/templates" + "github.com/runatlantis/atlantis/server/controllers/web_templates" "github.com/runatlantis/atlantis/server/controllers/websocket" "github.com/runatlantis/atlantis/server/core/locking" "github.com/runatlantis/atlantis/server/core/runtime" @@ -62,6 +62,7 @@ import ( "github.com/runatlantis/atlantis/server/events/vcs" "github.com/runatlantis/atlantis/server/events/vcs/bitbucketcloud" "github.com/runatlantis/atlantis/server/events/vcs/bitbucketserver" + "github.com/runatlantis/atlantis/server/events/vcs/gitea" "github.com/runatlantis/atlantis/server/events/webhooks" "github.com/runatlantis/atlantis/server/logging" ) @@ -106,10 +107,10 @@ type Server struct { StatusController *controllers.StatusController JobsController *controllers.JobsController APIController *controllers.APIController - IndexTemplate templates.TemplateWriter - LockDetailTemplate templates.TemplateWriter - ProjectJobsTemplate templates.TemplateWriter - ProjectJobsErrorTemplate templates.TemplateWriter + IndexTemplate web_templates.TemplateWriter + LockDetailTemplate web_templates.TemplateWriter + ProjectJobsTemplate web_templates.TemplateWriter + ProjectJobsErrorTemplate web_templates.TemplateWriter SSLCertFile string SSLKeyFile string CertLastRefreshTime time.Time @@ -176,6 +177,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { var bitbucketCloudClient *bitbucketcloud.Client var bitbucketServerClient *bitbucketserver.Client var azuredevopsClient *vcs.AzureDevopsClient + var giteaClient *gitea.GiteaClient policyChecksEnabled := false if userConfig.EnablePolicyChecksFlag { @@ -228,8 +230,9 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { supportedVCSHosts = append(supportedVCSHosts, models.Github) if userConfig.GithubUser != "" { githubCredentials = &vcs.GithubUserCredentials{ - User: userConfig.GithubUser, - Token: userConfig.GithubToken, + User: userConfig.GithubUser, + Token: userConfig.GithubToken, + TokenFile: userConfig.GithubTokenFile, } } else if userConfig.GithubAppID != 0 && userConfig.GithubAppKeyFile != "" { privateKey, err := os.ReadFile(userConfig.GithubAppKeyFile) @@ -237,24 +240,26 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { return nil, err } githubCredentials = &vcs.GithubAppCredentials{ - AppID: userConfig.GithubAppID, - Key: privateKey, - Hostname: userConfig.GithubHostname, - AppSlug: userConfig.GithubAppSlug, + AppID: userConfig.GithubAppID, + InstallationID: userConfig.GithubAppInstallationID, + Key: privateKey, + Hostname: userConfig.GithubHostname, + AppSlug: userConfig.GithubAppSlug, } githubAppEnabled = true } else if userConfig.GithubAppID != 0 && userConfig.GithubAppKey 
!= "" { githubCredentials = &vcs.GithubAppCredentials{ - AppID: userConfig.GithubAppID, - Key: []byte(userConfig.GithubAppKey), - Hostname: userConfig.GithubHostname, - AppSlug: userConfig.GithubAppSlug, + AppID: userConfig.GithubAppID, + InstallationID: userConfig.GithubAppInstallationID, + Key: []byte(userConfig.GithubAppKey), + Hostname: userConfig.GithubHostname, + AppSlug: userConfig.GithubAppSlug, } githubAppEnabled = true } var err error - rawGithubClient, err := vcs.NewGithubClient(userConfig.GithubHostname, githubCredentials, githubConfig, logger) + rawGithubClient, err := vcs.NewGithubClient(userConfig.GithubHostname, githubCredentials, githubConfig, userConfig.MaxCommentsPerCommand, logger) if err != nil { return nil, err } @@ -300,6 +305,19 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { return nil, err } } + if userConfig.GiteaToken != "" { + supportedVCSHosts = append(supportedVCSHosts, models.Gitea) + + giteaClient, err = gitea.NewClient(userConfig.GiteaBaseURL, userConfig.GiteaUser, userConfig.GiteaToken, userConfig.GiteaPageSize, logger) + if err != nil { + fmt.Println("error setting up gitea client", "error", err) + return nil, errors.Wrapf(err, "setting up Gitea client") + } else { + logger.Info("gitea client configured successfully") + } + } + + logger.Info("Supported VCS Hosts", "hosts", supportedVCSHosts) home, err := homedir.Dir() if err != nil { @@ -333,6 +351,11 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { return nil, err } } + if userConfig.GiteaUser != "" { + if err := vcs.WriteGitCreds(userConfig.GiteaUser, userConfig.GiteaToken, userConfig.GiteaBaseURL, home, logger, false); err != nil { + return nil, err + } + } } // default the project files used to generate the module index to the autoplan-file-list if autoplan-modules is true @@ -356,7 +379,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { if err != nil { return nil, errors.Wrap(err, "initializing webhooks") } - vcsClient := vcs.NewClientProxy(githubClient, gitlabClient, bitbucketCloudClient, bitbucketServerClient, azuredevopsClient) + vcsClient := vcs.NewClientProxy(githubClient, gitlabClient, bitbucketCloudClient, bitbucketServerClient, azuredevopsClient, giteaClient) commitStatusUpdater := &events.DefaultCommitStatusUpdater{Client: vcsClient, StatusName: userConfig.VCSStatusName} binDir, err := mkSubDir(userConfig.DataDir, BinDirName) @@ -399,8 +422,14 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { ) } + distribution := terraform.NewDistributionTerraform() + if userConfig.TFDistribution == "opentofu" { + distribution = terraform.NewDistributionOpenTofu() + } + terraformClient, err := terraform.NewClient( logger, + distribution, binDir, cacheDir, userConfig.TFEToken, @@ -408,7 +437,6 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { userConfig.DefaultTFVersion, config.DefaultTFVersionFlag, userConfig.TFDownloadURL, - &terraform.DefaultDownloader{}, userConfig.TFDownload, userConfig.UseTFPluginCache, projectCmdOutputHandler) @@ -416,7 +444,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { // are, then we don't error out because we don't have/want terraform // installed on our CI system where the unit tests run. 
if err != nil && flag.Lookup("test.v") == nil { - return nil, errors.Wrap(err, "initializing terraform") + return nil, errors.Wrap(err, fmt.Sprintf("initializing %s", userConfig.TFDistribution)) } markdownRenderer := events.NewMarkdownRenderer( gitlabClient.SupportsCommonMark(), @@ -469,7 +497,6 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { CheckoutMerge: userConfig.CheckoutStrategy == "merge", CheckoutDepth: userConfig.CheckoutDepth, GithubAppEnabled: githubAppEnabled, - Logger: logger, } scheduledExecutorService := scheduled.NewExecutorService( @@ -488,7 +515,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { GithubHostname: userConfig.GithubHostname, } - githubAppTokenRotator := vcs.NewGithubAppTokenRotator(logger, githubCredentials, userConfig.GithubHostname, home) + githubAppTokenRotator := vcs.NewGithubTokenRotator(logger, githubCredentials, userConfig.GithubHostname, "x-access-token", home) tokenJd, err := githubAppTokenRotator.GenerateJob() if err != nil { return nil, errors.Wrap(err, "could not write credentials") @@ -496,6 +523,15 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { scheduledExecutorService.AddJob(tokenJd) } + if userConfig.GithubUser != "" && userConfig.GithubTokenFile != "" && userConfig.WriteGitCreds { + githubTokenRotator := vcs.NewGithubTokenRotator(logger, githubCredentials, userConfig.GithubHostname, userConfig.GithubUser, home) + tokenJd, err := githubTokenRotator.GenerateJob() + if err != nil { + return nil, errors.Wrap(err, "could not write credentials") + } + scheduledExecutorService.AddJob(tokenJd) + } + projectLocker := &events.DefaultProjectLocker{ Locker: lockingClient, NoOpLocker: noOpLocker, @@ -503,7 +539,6 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { } deleteLockCommand := &events.DefaultDeleteLockCommand{ Locker: lockingClient, - Logger: logger, WorkingDir: workingDir, WorkingDirLocker: workingDirLocker, Backend: backend, @@ -515,7 +550,6 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { &events.PullClosedExecutor{ Locker: lockingClient, WorkingDir: workingDir, - Logger: logger, Backend: backend, PullClosedTemplate: &events.PullClosedEventTemplate{}, LogStreamResourceCleaner: projectCmdOutputHandler, @@ -528,6 +562,8 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { GithubToken: userConfig.GithubToken, GitlabUser: userConfig.GitlabUser, GitlabToken: userConfig.GitlabToken, + GiteaUser: userConfig.GiteaUser, + GiteaToken: userConfig.GiteaToken, AllowDraftPRs: userConfig.PlanDrafts, BitbucketUser: userConfig.BitbucketUser, BitbucketToken: userConfig.BitbucketToken, @@ -538,6 +574,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { commentParser := events.NewCommentParser( userConfig.GithubUser, userConfig.GitlabUser, + userConfig.GiteaUser, userConfig.BitbucketUser, userConfig.AzureDevopsUser, userConfig.ExecutableName, @@ -580,6 +617,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { Router: router, } projectCommandBuilder := events.NewInstrumentedProjectCommandBuilder( + logger, policyChecksEnabled, validator, &events.DefaultProjectFinder{}, @@ -601,7 +639,6 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverModeFlag, statsScope, - logger, terraformClient, ) @@ -613,7 +650,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { 
policyCheckStepRunner, err := runtime.NewPolicyCheckStepRunner( defaultTfVersion, - policy.NewConfTestExecutorWorkflow(logger, binDir, &terraform.DefaultDownloader{}), + policy.NewConfTestExecutorWorkflow(logger, binDir, &policy.ConfTestGoGetterVersionDownloader{}), ) if err != nil { @@ -695,7 +732,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { userConfig.QuietPolicyChecks, ) - pullReqStatusFetcher := vcs.NewPullReqStatusFetcher(vcsClient, userConfig.VCSStatusName) + pullReqStatusFetcher := vcs.NewPullReqStatusFetcher(vcsClient, userConfig.VCSStatusName, strings.Split(userConfig.IgnoreVCSStatusNames, ",")) planCommandRunner := events.NewPlanCommandRunner( userConfig.SilenceVCSStatusNoPlans, userConfig.SilenceVCSStatusNoProjects, @@ -784,10 +821,20 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { command.State: stateCommandRunner, } - githubTeamAllowlistChecker, err := events.NewTeamAllowlistChecker(userConfig.GithubTeamAllowlist) - if err != nil { - return nil, err + var teamAllowlistChecker command.TeamAllowlistChecker + if globalCfg.TeamAuthz.Command != "" { + teamAllowlistChecker = &events.ExternalTeamAllowlistChecker{ + Command: globalCfg.TeamAuthz.Command, + ExtraArgs: globalCfg.TeamAuthz.Args, + ExternalTeamAllowlistRunner: &runtime.DefaultExternalTeamAllowlistRunner{}, + } + } else { + teamAllowlistChecker, err = command.NewTeamAllowlistChecker(userConfig.GithubTeamAllowlist) + if err != nil { + return nil, err + } } + varFileAllowlistChecker, err := events.NewVarFileAllowlistChecker(userConfig.VarFileAllowlist) if err != nil { return nil, err @@ -798,6 +845,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { GithubPullGetter: githubClient, GitlabMergeRequestGetter: gitlabClient, AzureDevopsPullGetter: azuredevopsClient, + GiteaPullGetter: giteaClient, CommentCommandRunnerByCmd: commentCommandRunnerByCmd, EventParser: eventParser, FailOnPreWorkflowHookError: userConfig.FailOnPreWorkflowHookError, @@ -814,7 +862,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { PreWorkflowHooksCommandRunner: preWorkflowHooksCommandRunner, PostWorkflowHooksCommandRunner: postWorkflowHooksCommandRunner, PullStatusFetcher: backend, - TeamAllowlistChecker: githubTeamAllowlistChecker, + TeamAllowlistChecker: teamAllowlistChecker, VarFileAllowlistChecker: varFileAllowlistChecker, CommitStatusUpdater: commitStatusUpdater, } @@ -829,7 +877,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { ApplyLocker: applyLockingClient, Logger: logger, VCSClient: vcsClient, - LockDetailTemplate: templates.LockTemplate, + LockDetailTemplate: web_templates.LockTemplate, WorkingDir: workingDir, WorkingDirLocker: workingDirLocker, Backend: backend, @@ -847,24 +895,27 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { AtlantisVersion: config.AtlantisVersion, AtlantisURL: parsedURL, Logger: logger, - ProjectJobsTemplate: templates.ProjectJobsTemplate, - ProjectJobsErrorTemplate: templates.ProjectJobsErrorTemplate, + ProjectJobsTemplate: web_templates.ProjectJobsTemplate, + ProjectJobsErrorTemplate: web_templates.ProjectJobsErrorTemplate, Backend: backend, WsMux: wsMux, KeyGenerator: controllers.JobIDKeyGenerator{}, StatsScope: statsScope.SubScope("api"), } apiController := &controllers.APIController{ - APISecret: []byte(userConfig.APISecret), - Locker: lockingClient, - Logger: logger, - Parser: eventParser, - ProjectCommandBuilder: projectCommandBuilder, - 
ProjectPlanCommandRunner: instrumentedProjectCmdRunner, - ProjectApplyCommandRunner: instrumentedProjectCmdRunner, - RepoAllowlistChecker: repoAllowlist, - Scope: statsScope.SubScope("api"), - VCSClient: vcsClient, + APISecret: []byte(userConfig.APISecret), + Locker: lockingClient, + Logger: logger, + Parser: eventParser, + ProjectCommandBuilder: projectCommandBuilder, + ProjectPlanCommandRunner: instrumentedProjectCmdRunner, + ProjectApplyCommandRunner: instrumentedProjectCmdRunner, + FailOnPreWorkflowHookError: userConfig.FailOnPreWorkflowHookError, + PreWorkflowHooksCommandRunner: preWorkflowHooksCommandRunner, + PostWorkflowHooksCommandRunner: postWorkflowHooksCommandRunner, + RepoAllowlistChecker: repoAllowlist, + Scope: statsScope.SubScope("api"), + VCSClient: vcsClient, } eventsController := &events_controllers.VCSEventsController{ @@ -889,6 +940,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { AzureDevopsWebhookBasicUser: []byte(userConfig.AzureDevopsWebhookUser), AzureDevopsWebhookBasicPassword: []byte(userConfig.AzureDevopsWebhookPassword), AzureDevopsRequestValidator: &events_controllers.DefaultAzureDevopsRequestValidator{}, + GiteaWebhookSecret: []byte(userConfig.GiteaWebhookSecret), } githubAppController := &controllers.GithubAppController{ AtlantisURL: parsedURL, @@ -918,10 +970,10 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { JobsController: jobsController, StatusController: statusController, APIController: apiController, - IndexTemplate: templates.IndexTemplate, - LockDetailTemplate: templates.LockTemplate, - ProjectJobsTemplate: templates.ProjectJobsTemplate, - ProjectJobsErrorTemplate: templates.ProjectJobsErrorTemplate, + IndexTemplate: web_templates.IndexTemplate, + LockDetailTemplate: web_templates.LockTemplate, + ProjectJobsTemplate: web_templates.ProjectJobsTemplate, + ProjectJobsErrorTemplate: web_templates.ProjectJobsErrorTemplate, SSLKeyFile: userConfig.SSLKeyFile, SSLCertFile: userConfig.SSLCertFile, DisableGlobalApplyLock: userConfig.DisableGlobalApplyLock, @@ -1046,10 +1098,10 @@ func (s *Server) Index(w http.ResponseWriter, _ *http.Request) { return } - var lockResults []templates.LockIndexData + var lockResults []web_templates.LockIndexData for id, v := range locks { lockURL, _ := s.Router.Get(LockViewRouteName).URL("id", url.QueryEscape(id)) - lockResults = append(lockResults, templates.LockIndexData{ + lockResults = append(lockResults, web_templates.LockIndexData{ // NOTE: must use .String() instead of .Path because we need the // query params as part of the lock URL. LockPath: lockURL.String(), @@ -1059,7 +1111,7 @@ func (s *Server) Index(w http.ResponseWriter, _ *http.Request) { Path: v.Project.Path, Workspace: v.Workspace, Time: v.Time, - TimeFormatted: v.Time.Format("02-01-2006 15:04:05"), + TimeFormatted: v.Time.Format("2006-01-02 15:04:05"), }) } @@ -1071,16 +1123,16 @@ func (s *Server) Index(w http.ResponseWriter, _ *http.Request) { return } - applyLockData := templates.ApplyLockData{ + applyLockData := web_templates.ApplyLockData{ Time: applyCmdLock.Time, Locked: applyCmdLock.Locked, GlobalApplyLockEnabled: applyCmdLock.GlobalApplyLockEnabled, - TimeFormatted: applyCmdLock.Time.Format("02-01-2006 15:04:05"), + TimeFormatted: applyCmdLock.Time.Format("2006-01-02 15:04:05"), } //Sort by date - newest to oldest. 
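// [editor's note: illustrative aside, not part of the patch] Go time layouts
// are renderings of the reference time "Mon Jan 2 15:04:05 MST 2006", so the
// layout change above swaps DD-MM-YYYY for an ISO-8601-style date that also
// sorts correctly as a plain string:
//
//	t := time.Date(2024, time.March, 9, 13, 5, 0, 0, time.UTC)
//	fmt.Println(t.Format("02-01-2006 15:04:05")) // 09-03-2024 13:05:00 (old)
//	fmt.Println(t.Format("2006-01-02 15:04:05")) // 2024-03-09 13:05:00 (new)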
sort.SliceStable(lockResults, func(i, j int) bool { return lockResults[i].Time.After(lockResults[j].Time) }) - err = s.IndexTemplate.Execute(w, templates.IndexData{ + err = s.IndexTemplate.Execute(w, web_templates.IndexData{ Locks: lockResults, PullToJobMapping: preparePullToJobMappings(s), ApplyLock: applyLockData, @@ -1100,7 +1152,7 @@ func preparePullToJobMappings(s *Server) []jobs.PullInfoWithJobIDs { for j := range pullToJobMappings[i].JobIDInfos { jobUrl, _ := s.Router.Get(ProjectJobsViewRouteName).URL("job-id", pullToJobMappings[i].JobIDInfos[j].JobID) pullToJobMappings[i].JobIDInfos[j].JobIDUrl = jobUrl.String() - pullToJobMappings[i].JobIDInfos[j].TimeFormatted = pullToJobMappings[i].JobIDInfos[j].Time.Format("02-01-2006 15:04:05") + pullToJobMappings[i].JobIDInfos[j].TimeFormatted = pullToJobMappings[i].JobIDInfos[j].Time.Format("2006-01-02 15:04:05") } //Sort by date - newest to oldest. diff --git a/server/server_test.go b/server/server_test.go index e9151443e2..4da3583013 100644 --- a/server/server_test.go +++ b/server/server_test.go @@ -27,8 +27,8 @@ import ( "github.com/gorilla/mux" . "github.com/petergtz/pegomock/v4" "github.com/runatlantis/atlantis/server" - "github.com/runatlantis/atlantis/server/controllers/templates" - tMocks "github.com/runatlantis/atlantis/server/controllers/templates/mocks" + "github.com/runatlantis/atlantis/server/controllers/web_templates" + tMocks "github.com/runatlantis/atlantis/server/controllers/web_templates/mocks" "github.com/runatlantis/atlantis/server/core/locking/mocks" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/jobs" @@ -113,19 +113,19 @@ func TestIndex_Success(t *testing.T) { req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil)) w := httptest.NewRecorder() s.Index(w, req) - it.VerifyWasCalledOnce().Execute(w, templates.IndexData{ - ApplyLock: templates.ApplyLockData{ + it.VerifyWasCalledOnce().Execute(w, web_templates.IndexData{ + ApplyLock: web_templates.ApplyLockData{ Locked: false, Time: time.Time{}, - TimeFormatted: "01-01-0001 00:00:00", + TimeFormatted: "0001-01-01 00:00:00", }, - Locks: []templates.LockIndexData{ + Locks: []web_templates.LockIndexData{ { LockPath: "/lock?id=lkysow%252Fatlantis-example%252F.%252Fdefault", RepoFullName: "lkysow/atlantis-example", PullNum: 9, Time: now, - TimeFormatted: now.Format("02-01-2006 15:04:05"), + TimeFormatted: now.Format("2006-01-02 15:04:05"), }, }, PullToJobMapping: []jobs.PullInfoWithJobIDs{}, @@ -139,9 +139,12 @@ func TestHealthz(t *testing.T) { req, _ := http.NewRequest("GET", "/healthz", bytes.NewBuffer(nil)) w := httptest.NewRecorder() s.Healthz(w, req) - Equals(t, http.StatusOK, w.Result().StatusCode) - body, _ := io.ReadAll(w.Result().Body) - Equals(t, "application/json", w.Result().Header["Content-Type"][0]) + + resp := w.Result() + defer resp.Body.Close() + Equals(t, http.StatusOK, resp.StatusCode) + body, _ := io.ReadAll(resp.Body) + Equals(t, "application/json", resp.Header["Content-Type"][0]) Equals(t, `{ "status": "ok" diff --git a/server/user_config.go b/server/user_config.go index 977b008610..10e6e6b9fc 100644 --- a/server/user_config.go +++ b/server/user_config.go @@ -50,6 +50,7 @@ type UserConfig struct { GithubAllowMergeableBypassApply bool `mapstructure:"gh-allow-mergeable-bypass-apply"` GithubHostname string `mapstructure:"gh-hostname"` GithubToken string `mapstructure:"gh-token"` + GithubTokenFile string `mapstructure:"gh-token-file"` GithubUser string `mapstructure:"gh-user"` GithubWebhookSecret string 
`mapstructure:"gh-webhook-secret"` GithubOrg string `mapstructure:"gh-org"` @@ -57,7 +58,13 @@ type UserConfig struct { GithubAppKey string `mapstructure:"gh-app-key"` GithubAppKeyFile string `mapstructure:"gh-app-key-file"` GithubAppSlug string `mapstructure:"gh-app-slug"` + GithubAppInstallationID int64 `mapstructure:"gh-app-installation-id"` GithubTeamAllowlist string `mapstructure:"gh-team-allowlist"` + GiteaBaseURL string `mapstructure:"gitea-base-url"` + GiteaToken string `mapstructure:"gitea-token"` + GiteaUser string `mapstructure:"gitea-user"` + GiteaWebhookSecret string `mapstructure:"gitea-webhook-secret"` + GiteaPageSize int `mapstructure:"gitea-page-size"` GitlabHostname string `mapstructure:"gitlab-hostname"` GitlabToken string `mapstructure:"gitlab-token"` GitlabUser string `mapstructure:"gitlab-user"` @@ -68,6 +75,8 @@ type UserConfig struct { LockingDBType string `mapstructure:"locking-db-type"` LogLevel string `mapstructure:"log-level"` MarkdownTemplateOverridesDir string `mapstructure:"markdown-template-overrides-dir"` + MaxCommentsPerCommand int `mapstructure:"max-comments-per-command"` + IgnoreVCSStatusNames string `mapstructure:"ignore-vcs-status-names"` ParallelPoolSize int `mapstructure:"parallel-pool-size"` ParallelPlan bool `mapstructure:"parallel-plan"` ParallelApply bool `mapstructure:"parallel-apply"` @@ -100,6 +109,7 @@ type UserConfig struct { SSLCertFile string `mapstructure:"ssl-cert-file"` SSLKeyFile string `mapstructure:"ssl-key-file"` RestrictFileList bool `mapstructure:"restrict-file-list"` + TFDistribution string `mapstructure:"tf-distribution"` TFDownload bool `mapstructure:"tf-download"` TFDownloadURL string `mapstructure:"tf-download-url"` TFEHostname string `mapstructure:"tfe-hostname"` diff --git a/server/user_config_test.go b/server/user_config_test.go index 490abe7c52..225049f335 100644 --- a/server/user_config_test.go +++ b/server/user_config_test.go @@ -8,6 +8,7 @@ import ( "github.com/runatlantis/atlantis/server/logging" . "github.com/runatlantis/atlantis/testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestUserConfig_ToAllowCommandNames(t *testing.T) { @@ -61,7 +62,7 @@ func TestUserConfig_ToAllowCommandNames(t *testing.T) { } got, err := u.ToAllowCommandNames() if err != nil { - assert.ErrorContains(t, err, tt.wantErr, "ToAllowCommandNames()") + require.ErrorContains(t, err, tt.wantErr, "ToAllowCommandNames()") } assert.Equalf(t, tt.want, got, "ToAllowCommandNames()") }) diff --git a/server/utils/os.go b/server/utils/os.go new file mode 100644 index 0000000000..2a06d8486e --- /dev/null +++ b/server/utils/os.go @@ -0,0 +1,13 @@ +package utils + +import "os" + +// RemoveIgnoreNonExistent removes a file, ignoring if it doesn't exist. +func RemoveIgnoreNonExistent(file string) error { + err := os.Remove(file) + if err == nil || os.IsNotExist(err) { + return nil + } + + return err +} diff --git a/testdrive/github.go b/testdrive/github.go index b04a6bb2ef..91b961d5ae 100644 --- a/testdrive/github.go +++ b/testdrive/github.go @@ -18,7 +18,7 @@ import ( "strings" "time" - "github.com/google/go-github/v58/github" + "github.com/google/go-github/v65/github" ) var githubUsername string @@ -55,12 +55,14 @@ func (g *Client) CheckForkSuccess(ownerName string, forkRepoName string) bool { // CreateWebhook creates a GitHub webhook to send requests to our local ngrok. 
func (g *Client) CreateWebhook(ownerName string, repoName string, hookURL string) error { + contentType := "json" + hookConfig := &github.HookConfig{ + ContentType: &contentType, + URL: &hookURL, + } atlantisHook := &github.Hook{ Events: []string{"issue_comment", "pull_request", "pull_request_review", "push"}, - Config: map[string]interface{}{ - "url": hookURL, - "content_type": "json", - }, + Config: hookConfig, Active: github.Bool(true), } _, _, err := g.client.Repositories.CreateHook(g.ctx, ownerName, repoName, atlantisHook) diff --git a/testdrive/testdrive.go b/testdrive/testdrive.go index bfa18d8a7c..a8bdf7a408 100644 --- a/testdrive/testdrive.go +++ b/testdrive/testdrive.go @@ -31,7 +31,7 @@ import ( "time" "github.com/briandowns/spinner" - "github.com/google/go-github/v58/github" + "github.com/google/go-github/v65/github" "github.com/mitchellh/colorstring" "github.com/pkg/errors" ) @@ -120,7 +120,7 @@ Follow these instructions to create a token (we don't store any tokens): return errors.Wrapf(err, "forking repo %s/%s", terraformExampleRepoOwner, terraformExampleRepo) } if !githubClient.CheckForkSuccess(terraformExampleRepoOwner, terraformExampleRepo) { - return fmt.Errorf("didn't find forked repo %s/%s. fork unsuccessful", terraformExampleRepoOwner, terraformExampleRepoOwner) + return fmt.Errorf("didn't find forked repo %s/%s. fork unsuccessful", terraformExampleRepoOwner, terraformExampleRepo) } s.Stop() colorstring.Println("[green]=> fork completed![reset]") diff --git a/testdrive/utils.go b/testdrive/utils.go index cbf706d587..a447e976bb 100644 --- a/testdrive/utils.go +++ b/testdrive/utils.go @@ -35,7 +35,7 @@ import ( ) const hashicorpReleasesURL = "https://releases.hashicorp.com" -const terraformVersion = "1.7.4" // renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp +const terraformVersion = "1.9.8" // renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp const ngrokDownloadURL = "https://bin.equinox.io/c/4VmDzA7iaHb" const ngrokAPIURL = "localhost:41414" // We hope this isn't used. 
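// [editor's note: hedged aside, not part of the patch] Somewhere between
// go-github v58 and v65 (the bump above), Hook.Config changed from an
// untyped map to the typed *github.HookConfig now used in CreateWebhook.
// With go-github's pointer helpers the same hook can be built without the
// intermediate variables:
//
//	hook := &github.Hook{
//	    Events: []string{"issue_comment", "pull_request", "pull_request_review", "push"},
//	    Config: &github.HookConfig{
//	        ContentType: github.String("json"),
//	        URL:         github.String(hookURL),
//	    },
//	    Active: github.Bool(true),
//	}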
const atlantisPort = 4141 diff --git a/testing/Dockerfile b/testing/Dockerfile index 6f5643c355..5c62679bf6 100644 --- a/testing/Dockerfile +++ b/testing/Dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.22.0 +FROM golang:1.23.2@sha256:ad5c126b5cf501a8caef751a243bb717ec204ab1aa56dc41dc11be089fafcb4f RUN apt-get update && apt-get --no-install-recommends -y install unzip \ && apt-get clean \ @@ -6,7 +6,7 @@ RUN apt-get update && apt-get --no-install-recommends -y install unzip \ # Install Terraform # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp -ENV TERRAFORM_VERSION=1.7.4 +ENV TERRAFORM_VERSION=1.9.8 RUN case $(uname -m) in x86_64|amd64) ARCH="amd64" ;; aarch64|arm64|armv7l) ARCH="arm64" ;; esac && \ wget -nv -O terraform.zip https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_${ARCH}.zip && \ mkdir -p /usr/local/bin/tf/versions/${TERRAFORM_VERSION} && \ @@ -16,7 +16,7 @@ RUN case $(uname -m) in x86_64|amd64) ARCH="amd64" ;; aarch64|arm64|armv7l) ARCH # Install conftest # renovate: datasource=github-releases depName=open-policy-agent/conftest -ENV CONFTEST_VERSION=0.49.1 +ENV CONFTEST_VERSION=0.56.0 SHELL ["/bin/bash", "-o", "pipefail", "-c"] RUN case $(uname -m) in x86_64|amd64) ARCH="x86_64" ;; aarch64|arm64|armv7l) ARCH="arm64" ;; esac && \ curl -LOs https://github.com/open-policy-agent/conftest/releases/download/v${CONFTEST_VERSION}/conftest_${CONFTEST_VERSION}_Linux_${ARCH}.tar.gz && \
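# [editor's note: aside, not part of the patch] The golang base image above is
# now pinned by tag plus digest (1.23.2@sha256:...): the tag stays readable
# while the digest makes the build reproducible, and automation such as
# Renovate updates tag and digest together. Dockerfile full-line comments are
# removed before the shell runs a continued RUN command, so a comment is safe
# even after the trailing line continuation above.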