diff --git a/.circleci/config.yml b/.circleci/config.yml
index 227603cbbd..8f552060aa 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -9,7 +9,7 @@ jobs:
# that flag starts the download asynchronously so we'd have a race
# condition.
# renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp
- TERRAFORM_VERSION: 1.7.4
+ TERRAFORM_VERSION: 1.7.5
steps:
- checkout
- run: make build-service
diff --git a/.dockerignore b/.dockerignore
index d9647e1977..523596ac26 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,5 +1,6 @@
*
!cmd/
+!scripts/download-release.sh
!server/
!testdrive/
!main.go
diff --git a/.github/workflows/website.yml b/.github/workflows/website.yml
index 8d58751deb..04c58736af 100644
--- a/.github/workflows/website.yml
+++ b/.github/workflows/website.yml
@@ -76,6 +76,7 @@ jobs:
-e 'https://medium.com/runatlantis' \
-e 'https://github\.com/runatlantis/atlantis/edit/main/.*' \
-e 'https://github.com/runatlantis/helm-charts#customization' \
+ -e 'https://github.com/sethvargo/atlantis-on-gke/blob/master/terraform/tls.tf#L64-L84' \
-e 'https://confluence.atlassian.com/*' \
--header 'Accept-Encoding:deflate, gzip' \
--buffer-size 8192 \
diff --git a/.node-version b/.node-version
index 2dbbe00e67..2b9cabc07c 100644
--- a/.node-version
+++ b/.node-version
@@ -1 +1 @@
-20.11.1
+20.12.0
diff --git a/.tool-versions b/.tool-versions
index 69031ad023..e2db8c3dfb 100644
--- a/.tool-versions
+++ b/.tool-versions
@@ -1 +1 @@
-pnpm 8.15.4
+pnpm 8.15.5
diff --git a/Dockerfile b/Dockerfile
index 49341b8bc5..1102b06a16 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -2,15 +2,18 @@
# what distro is the image being built for
ARG ALPINE_TAG=3.19.1
ARG DEBIAN_TAG=12.5-slim
+ARG GOLANG_VERSION=1.22.1
# renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp
ARG DEFAULT_TERRAFORM_VERSION=1.7.2
+# renovate: datasource=github-releases depName=opentofu/opentofu versioning=hashicorp
+ARG DEFAULT_OPENTOFU_VERSION=1.6.2
# renovate: datasource=github-releases depName=open-policy-agent/conftest
ARG DEFAULT_CONFTEST_VERSION=0.49.1
# Stage 1: build artifact and download deps
-FROM golang:1.22.1-alpine AS builder
+FROM golang:${GOLANG_VERSION}-alpine AS builder
ARG ATLANTIS_VERSION=dev
ENV ATLANTIS_VERSION=${ATLANTIS_VERSION}
@@ -68,7 +71,6 @@ ARG TARGETPLATFORM
WORKDIR /tmp/build
# install conftest
-# renovate: datasource=github-releases depName=open-policy-agent/conftest
ARG DEFAULT_CONFTEST_VERSION
ENV DEFAULT_CONFTEST_VERSION=${DEFAULT_CONFTEST_VERSION}
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
@@ -106,31 +108,26 @@ RUN case ${TARGETPLATFORM} in \
git-lfs --version
# install terraform binaries
-# renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp
ARG DEFAULT_TERRAFORM_VERSION
ENV DEFAULT_TERRAFORM_VERSION=${DEFAULT_TERRAFORM_VERSION}
+ARG DEFAULT_OPENTOFU_VERSION
+ENV DEFAULT_OPENTOFU_VERSION=${DEFAULT_OPENTOFU_VERSION}
+
+COPY --from=builder /app/scripts/download-release.sh download-release.sh
# In the official Atlantis image, we only have the latest of each Terraform version.
# Each binary is about 80 MB so we limit it to the 4 latest minor releases or fewer
-RUN AVAILABLE_TERRAFORM_VERSIONS="1.4.7 1.5.7 1.6.6 ${DEFAULT_TERRAFORM_VERSION}" && \
- case "${TARGETPLATFORM}" in \
- "linux/amd64") TERRAFORM_ARCH=amd64 ;; \
- "linux/arm64") TERRAFORM_ARCH=arm64 ;; \
- "linux/arm/v7") TERRAFORM_ARCH=arm ;; \
- *) echo "ERROR: 'TARGETPLATFORM' value expected: ${TARGETPLATFORM}"; exit 1 ;; \
- esac && \
- for VERSION in ${AVAILABLE_TERRAFORM_VERSIONS}; do \
- curl -LOs "https://releases.hashicorp.com/terraform/${VERSION}/terraform_${VERSION}_linux_${TERRAFORM_ARCH}.zip" && \
- curl -LOs "https://releases.hashicorp.com/terraform/${VERSION}/terraform_${VERSION}_SHA256SUMS" && \
- sed -n "/terraform_${VERSION}_linux_${TERRAFORM_ARCH}.zip/p" "terraform_${VERSION}_SHA256SUMS" | sha256sum -c && \
- mkdir -p "/usr/local/bin/tf/versions/${VERSION}" && \
- unzip "terraform_${VERSION}_linux_${TERRAFORM_ARCH}.zip" -d "/usr/local/bin/tf/versions/${VERSION}" && \
- ln -s "/usr/local/bin/tf/versions/${VERSION}/terraform" "/usr/local/bin/terraform${VERSION}" && \
- rm "terraform_${VERSION}_linux_${TERRAFORM_ARCH}.zip" && \
- rm "terraform_${VERSION}_SHA256SUMS"; \
- done && \
- ln -s "/usr/local/bin/tf/versions/${DEFAULT_TERRAFORM_VERSION}/terraform" /usr/local/bin/terraform
-
+RUN ./download-release.sh \
+ "terraform" \
+ "${TARGETPLATFORM}" \
+ "${DEFAULT_TERRAFORM_VERSION}" \
+ "1.4.7 1.5.7 1.6.6 ${DEFAULT_TERRAFORM_VERSION}" \
+ && ./download-release.sh \
+ "tofu" \
+ "${TARGETPLATFORM}" \
+ "${DEFAULT_OPENTOFU_VERSION}" \
+ "${DEFAULT_OPENTOFU_VERSION}"
# Stage 2 - Alpine
# Creating the individual distro builds using targets
@@ -151,6 +148,7 @@ RUN addgroup atlantis && \
COPY --from=builder /app/atlantis /usr/local/bin/atlantis
# copy terraform binaries
COPY --from=deps /usr/local/bin/terraform* /usr/local/bin/
+COPY --from=deps /usr/local/bin/tofu* /usr/local/bin/
# copy dependencies
COPY --from=deps /usr/local/bin/conftest /usr/local/bin/conftest
COPY --from=deps /usr/bin/git-lfs /usr/bin/git-lfs
@@ -159,7 +157,7 @@ COPY docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh
# Install packages needed to run Atlantis.
# We place this last as it will bust less docker layer caches when packages update
RUN apk add --no-cache \
- ca-certificates~=20230506 \
+ ca-certificates~=20240226-r0 \
curl~=8 \
git~=2 \
unzip~=6 \
@@ -168,7 +166,6 @@ RUN apk add --no-cache \
dumb-init~=1 \
gcompat~=1
-
# Set the entry point to the atlantis user and run the atlantis command
USER atlantis
ENTRYPOINT ["docker-entrypoint.sh"]
@@ -191,6 +188,7 @@ RUN useradd --create-home --user-group --shell /bin/bash atlantis && \
COPY --from=builder /app/atlantis /usr/local/bin/atlantis
# copy terraform binaries
COPY --from=deps /usr/local/bin/terraform* /usr/local/bin/
+COPY --from=deps /usr/local/bin/tofu* /usr/local/bin/
# copy dependencies
COPY --from=deps /usr/local/bin/conftest /usr/local/bin/conftest
COPY --from=deps /usr/bin/git-lfs /usr/bin/git-lfs
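Note: the new scripts/download-release.sh is invoked above, but its body is not part of this diff. Below is a minimal sketch of the contract implied by the call sites (tool name, buildx platform, default version, space-separated version list), assuming HashiCorp releases for Terraform and GitHub releases for OpenTofu; the real script may differ in URL layout and install paths.

#!/bin/bash
# Hypothetical sketch of scripts/download-release.sh, reconstructed from the
# call sites in the Dockerfile above and the inline loop it replaces.
#
# Usage: download-release.sh <tool> <targetplatform> <default-version> <versions>
set -euo pipefail

TOOL="$1"             # "terraform" or "tofu"
TARGETPLATFORM="$2"   # Docker buildx platform, e.g. "linux/amd64"
DEFAULT_VERSION="$3"  # version the unversioned ${TOOL} symlink points at
VERSIONS="$4"         # space-separated list of versions to install

case "${TARGETPLATFORM}" in
    "linux/amd64")  ARCH=amd64 ;;
    "linux/arm64")  ARCH=arm64 ;;
    "linux/arm/v7") ARCH=arm ;;
    *) echo "ERROR: unsupported TARGETPLATFORM: ${TARGETPLATFORM}"; exit 1 ;;
esac

# intentionally unquoted so the version list word-splits
for VERSION in ${VERSIONS}; do
    if [ "${TOOL}" = "terraform" ]; then
        BASE_URL="https://releases.hashicorp.com/terraform/${VERSION}"
    else
        BASE_URL="https://github.com/opentofu/opentofu/releases/download/v${VERSION}"
    fi
    ARCHIVE="${TOOL}_${VERSION}_linux_${ARCH}.zip"
    curl -LOs "${BASE_URL}/${ARCHIVE}"
    curl -LOs "${BASE_URL}/${TOOL}_${VERSION}_SHA256SUMS"
    # verify the checksum of the one archive we downloaded
    sed -n "/${ARCHIVE}/p" "${TOOL}_${VERSION}_SHA256SUMS" | sha256sum -c
    unzip -o "${ARCHIVE}" -d /tmp/extract
    mv "/tmp/extract/${TOOL}" "/usr/local/bin/${TOOL}${VERSION}"
    rm "${ARCHIVE}" "${TOOL}_${VERSION}_SHA256SUMS"
done

# the bare tool name points at the default version
ln -sf "/usr/local/bin/${TOOL}${DEFAULT_VERSION}" "/usr/local/bin/${TOOL}"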
diff --git a/README.md b/README.md
index 6471847770..e60bfba133 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@
[![Go Reference](https://pkg.go.dev/badge/github.com/runatlantis/atlantis.svg)](https://pkg.go.dev/github.com/runatlantis/atlantis)
[![codecov](https://codecov.io/gh/runatlantis/atlantis/branch/main/graph/badge.svg)](https://codecov.io/gh/runatlantis/atlantis)
[![CircleCI](https://circleci.com/gh/runatlantis/atlantis/tree/main.svg?style=shield)](https://circleci.com/gh/runatlantis/atlantis/tree/main)
-[![Slack](https://img.shields.io/badge/Join-Atlantis%20Community%20Slack-red)](https://join.slack.com/t/atlantis-community/shared_invite/zt-1nt7yx7uq-AnVRc_JItF1CDwZtfqv_OA)
+[![Slack](https://img.shields.io/badge/Join-Atlantis%20Community%20Slack-red)](https://join.slack.com/t/atlantis-community/shared_invite/zt-9xlxtxtc-CUSKB1ATt_sQy6um~LDPNw)
@@ -23,7 +23,7 @@
* How to get started: [www.runatlantis.io/guide](https://www.runatlantis.io/guide)
* Full documentation: [www.runatlantis.io/docs](https://www.runatlantis.io/docs)
* Download the latest release: [github.com/runatlantis/atlantis/releases/latest](https://github.com/runatlantis/atlantis/releases/latest)
-* Get help in our [Slack channel](https://join.slack.com/t/atlantis-community/shared_invite/zt-1nt7yx7uq-AnVRc_JItF1CDwZtfqv_OA)
+* Get help in our [Slack channel](https://join.slack.com/t/atlantis-community/shared_invite/zt-9xlxtxtc-CUSKB1ATt_sQy6um~LDPNw)
* Start Contributing: [CONTRIBUTING.md](CONTRIBUTING.md)
## What is Atlantis?
diff --git a/cmd/server.go b/cmd/server.go
index 31ecfd393e..caa1351f7e 100644
--- a/cmd/server.go
+++ b/cmd/server.go
@@ -93,6 +93,11 @@ const (
GHOrganizationFlag = "gh-org"
GHWebhookSecretFlag = "gh-webhook-secret" // nolint: gosec
GHAllowMergeableBypassApply = "gh-allow-mergeable-bypass-apply" // nolint: gosec
+ GiteaBaseURLFlag = "gitea-base-url"
+ GiteaTokenFlag = "gitea-token"
+ GiteaUserFlag = "gitea-user"
+ GiteaWebhookSecretFlag = "gitea-webhook-secret" // nolint: gosec
+ GiteaPageSizeFlag = "gitea-page-size"
GitlabHostnameFlag = "gitlab-hostname"
GitlabTokenFlag = "gitlab-token"
GitlabUserFlag = "gitlab-user"
@@ -156,6 +161,8 @@ const (
DefaultExecutableName = "atlantis"
DefaultMarkdownTemplateOverridesDir = "~/.markdown_templates"
DefaultGHHostname = "github.com"
+ DefaultGiteaBaseURL = "https://gitea.com"
+ DefaultGiteaPageSize = 30
DefaultGitlabHostname = "gitlab.com"
DefaultLockingDBType = "boltdb"
DefaultLogLevel = "info"
@@ -318,6 +325,22 @@ var stringFlags = map[string]stringFlag{
"This means that an attacker could spoof calls to Atlantis and cause it to perform malicious actions. " +
"Should be specified via the ATLANTIS_GH_WEBHOOK_SECRET environment variable.",
},
+ GiteaBaseURLFlag: {
+ description: "Base URL of your Gitea server installation. Must include 'http://' or 'https://'.",
+ },
+ GiteaUserFlag: {
+ description: "Gitea username of API user.",
+ defaultValue: "",
+ },
+ GiteaTokenFlag: {
+ description: "Gitea token of API user. Can also be specified via the ATLANTIS_GITEA_TOKEN environment variable.",
+ },
+ GiteaWebhookSecretFlag: {
+ description: "Optional secret used to validate Gitea webhooks." +
+ " SECURITY WARNING: If not specified, Atlantis won't be able to validate that the incoming webhook call came from Gitea. " +
+ "This means that an attacker could spoof calls to Atlantis and cause it to perform malicious actions. " +
+ "Should be specified via the ATLANTIS_GITEA_WEBHOOK_SECRET environment variable.",
+ },
GitlabHostnameFlag: {
description: "Hostname of your GitLab Enterprise installation. If using gitlab.com, no need to set.",
defaultValue: DefaultGitlabHostname,
@@ -568,6 +591,10 @@ var intFlags = map[string]intFlag{
" If merge base is further behind than this number of commits from any of branches heads, full fetch will be performed.",
defaultValue: DefaultCheckoutDepth,
},
+ GiteaPageSizeFlag: {
+ description: "Optional value that specifies the number of results per page to request from Gitea.",
+ defaultValue: DefaultGiteaPageSize,
+ },
ParallelPoolSize: {
description: "Max size of the wait group that runs parallel plans and applies (if enabled).",
defaultValue: DefaultParallelPoolSize,
@@ -813,6 +840,12 @@ func (s *ServerCmd) setDefaults(c *server.UserConfig) {
if c.GitlabHostname == "" {
c.GitlabHostname = DefaultGitlabHostname
}
+ if c.GiteaBaseURL == "" {
+ c.GiteaBaseURL = DefaultGiteaBaseURL
+ }
+ if c.GiteaPageSize == 0 {
+ c.GiteaPageSize = DefaultGiteaPageSize
+ }
if c.BitbucketBaseURL == "" {
c.BitbucketBaseURL = DefaultBitbucketBaseURL
}
@@ -885,12 +918,17 @@ func (s *ServerCmd) validate(userConfig server.UserConfig) error {
// The following combinations are valid.
// 1. github user and token set
// 2. github app ID and (key file set or key set)
- // 3. gitlab user and token set
- // 4. bitbucket user and token set
- // 5. azuredevops user and token set
- // 6. any combination of the above
- vcsErr := fmt.Errorf("--%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s must be set", GHUserFlag, GHTokenFlag, GHAppIDFlag, GHAppKeyFileFlag, GHAppIDFlag, GHAppKeyFlag, GitlabUserFlag, GitlabTokenFlag, BitbucketUserFlag, BitbucketTokenFlag, ADUserFlag, ADTokenFlag)
- if ((userConfig.GithubUser == "") != (userConfig.GithubToken == "")) || ((userConfig.GitlabUser == "") != (userConfig.GitlabToken == "")) || ((userConfig.BitbucketUser == "") != (userConfig.BitbucketToken == "")) || ((userConfig.AzureDevopsUser == "") != (userConfig.AzureDevopsToken == "")) {
+ // 3. gitea user and token set
+ // 4. gitlab user and token set
+ // 5. bitbucket user and token set
+ // 6. azuredevops user and token set
+ // 7. any combination of the above
+ vcsErr := fmt.Errorf("--%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s must be set", GHUserFlag, GHTokenFlag, GHAppIDFlag, GHAppKeyFileFlag, GHAppIDFlag, GHAppKeyFlag, GiteaUserFlag, GiteaTokenFlag, GitlabUserFlag, GitlabTokenFlag, BitbucketUserFlag, BitbucketTokenFlag, ADUserFlag, ADTokenFlag)
+ if ((userConfig.GithubUser == "") != (userConfig.GithubToken == "")) ||
+ ((userConfig.GiteaUser == "") != (userConfig.GiteaToken == "")) ||
+ ((userConfig.GitlabUser == "") != (userConfig.GitlabToken == "")) ||
+ ((userConfig.BitbucketUser == "") != (userConfig.BitbucketToken == "")) ||
+ ((userConfig.AzureDevopsUser == "") != (userConfig.AzureDevopsToken == "")) {
return vcsErr
}
if (userConfig.GithubAppID != 0) && ((userConfig.GithubAppKey == "") && (userConfig.GithubAppKeyFile == "")) {
@@ -901,7 +939,7 @@ func (s *ServerCmd) validate(userConfig server.UserConfig) error {
}
// At this point, we know that there can't be a single user/token without
// its partner, but we haven't checked if any user/token is set at all.
- if userConfig.GithubAppID == 0 && userConfig.GithubUser == "" && userConfig.GitlabUser == "" && userConfig.BitbucketUser == "" && userConfig.AzureDevopsUser == "" {
+ if userConfig.GithubAppID == 0 && userConfig.GithubUser == "" && userConfig.GiteaUser == "" && userConfig.GitlabUser == "" && userConfig.BitbucketUser == "" && userConfig.AzureDevopsUser == "" {
return vcsErr
}
@@ -924,6 +962,14 @@ func (s *ServerCmd) validate(userConfig server.UserConfig) error {
return fmt.Errorf("--%s must have http:// or https://, got %q", BitbucketBaseURLFlag, userConfig.BitbucketBaseURL)
}
+ parsed, err = url.Parse(userConfig.GiteaBaseURL)
+ if err != nil {
+ return fmt.Errorf("error parsing --%s flag value %q: %s", GiteaWebhookSecretFlag, userConfig.GiteaBaseURL, err)
+ }
+ if parsed.Scheme != "http" && parsed.Scheme != "https" {
+ return fmt.Errorf("--%s must have http:// or https://, got %q", GiteaBaseURLFlag, userConfig.GiteaBaseURL)
+ }
+
if userConfig.RepoConfig != "" && userConfig.RepoConfigJSON != "" {
return fmt.Errorf("cannot use --%s and --%s at the same time", RepoConfigFlag, RepoConfigJSONFlag)
}
@@ -936,6 +982,8 @@ func (s *ServerCmd) validate(userConfig server.UserConfig) error {
GitlabWebhookSecretFlag: userConfig.GitlabWebhookSecret,
BitbucketTokenFlag: userConfig.BitbucketToken,
BitbucketWebhookSecretFlag: userConfig.BitbucketWebhookSecret,
+ GiteaTokenFlag: userConfig.GiteaToken,
+ GiteaWebhookSecretFlag: userConfig.GiteaWebhookSecret,
} {
if strings.Contains(token, "\n") {
s.Logger.Warn("--%s contains a newline which is usually unintentional", name)
@@ -1029,6 +1077,7 @@ func (s *ServerCmd) setVarFileAllowlist(userConfig *server.UserConfig) {
// trimAtSymbolFromUsers trims @ from the front of the github and gitlab usernames
func (s *ServerCmd) trimAtSymbolFromUsers(userConfig *server.UserConfig) {
userConfig.GithubUser = strings.TrimPrefix(userConfig.GithubUser, "@")
+ userConfig.GiteaUser = strings.TrimPrefix(userConfig.GiteaUser, "@")
userConfig.GitlabUser = strings.TrimPrefix(userConfig.GitlabUser, "@")
userConfig.BitbucketUser = strings.TrimPrefix(userConfig.BitbucketUser, "@")
userConfig.AzureDevopsUser = strings.TrimPrefix(userConfig.AzureDevopsUser, "@")
@@ -1038,6 +1087,9 @@ func (s *ServerCmd) securityWarnings(userConfig *server.UserConfig) {
if userConfig.GithubUser != "" && userConfig.GithubWebhookSecret == "" && !s.SilenceOutput {
s.Logger.Warn("no GitHub webhook secret set. This could allow attackers to spoof requests from GitHub")
}
+ if userConfig.GiteaUser != "" && userConfig.GiteaWebhookSecret == "" && !s.SilenceOutput {
+ s.Logger.Warn("no Gitea webhook secret set. This could allow attackers to spoof requests from Gitea")
+ }
if userConfig.GitlabUser != "" && userConfig.GitlabWebhookSecret == "" && !s.SilenceOutput {
s.Logger.Warn("no GitLab webhook secret set. This could allow attackers to spoof requests from GitLab")
}
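Note: validate treats each user/token pair as all-or-nothing; (user == "") != (token == "") is true exactly when one half of a pair is set, so Gitea slots into the same rule as the existing providers. For illustration, here is a hypothetical invocation wiring up the new flags (host, org, and credentials are placeholders, not values from this diff):

# Illustrative only: running Atlantis against a self-hosted Gitea instance.
atlantis server \
  --gitea-base-url="https://gitea.example.com" \
  --gitea-user="atlantis" \
  --gitea-token="$ATLANTIS_GITEA_TOKEN" \
  --gitea-webhook-secret="$ATLANTIS_GITEA_WEBHOOK_SECRET" \
  --gitea-page-size=30 \
  --repo-allowlist="gitea.example.com/myorg/*"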
diff --git a/cmd/server_test.go b/cmd/server_test.go
index 81b834151d..1d5ff3c77a 100644
--- a/cmd/server_test.go
+++ b/cmd/server_test.go
@@ -93,6 +93,11 @@ var testFlags = map[string]interface{}{
GHAppSlugFlag: "atlantis",
GHOrganizationFlag: "",
GHWebhookSecretFlag: "secret",
+ GiteaBaseURLFlag: "http://localhost",
+ GiteaTokenFlag: "gitea-token",
+ GiteaUserFlag: "gitea-user",
+ GiteaWebhookSecretFlag: "gitea-secret",
+ GiteaPageSizeFlag: 30,
GitlabHostnameFlag: "gitlab-hostname",
GitlabTokenFlag: "gitlab-token",
GitlabUserFlag: "gitlab-user",
@@ -156,6 +161,7 @@ func TestExecute_Defaults(t *testing.T) {
c := setup(map[string]interface{}{
GHUserFlag: "user",
GHTokenFlag: "token",
+ GiteaBaseURLFlag: "http://localhost",
RepoAllowlistFlag: "*",
}, t)
err := c.Execute()
@@ -174,6 +180,7 @@ func TestExecute_Defaults(t *testing.T) {
strExceptions := map[string]string{
GHUserFlag: "user",
GHTokenFlag: "token",
+ GiteaBaseURLFlag: "http://localhost",
DataDirFlag: dataDir,
MarkdownTemplateOverridesDirFlag: markdownTemplateOverridesDir,
AtlantisURLFlag: "http://" + hostname + ":4141",
@@ -422,7 +429,7 @@ func TestExecute_ValidateSSLConfig(t *testing.T) {
}
func TestExecute_ValidateVCSConfig(t *testing.T) {
- expErr := "--gh-user/--gh-token or --gh-app-id/--gh-app-key-file or --gh-app-id/--gh-app-key or --gitlab-user/--gitlab-token or --bitbucket-user/--bitbucket-token or --azuredevops-user/--azuredevops-token must be set"
+ expErr := "--gh-user/--gh-token or --gh-app-id/--gh-app-key-file or --gh-app-id/--gh-app-key or --gitea-user/--gitea-token or --gitlab-user/--gitlab-token or --bitbucket-user/--bitbucket-token or --azuredevops-user/--azuredevops-token must be set"
cases := []struct {
description string
flags map[string]interface{}
@@ -440,6 +447,13 @@ func TestExecute_ValidateVCSConfig(t *testing.T) {
},
true,
},
+ {
+ "just gitea token set",
+ map[string]interface{}{
+ GiteaTokenFlag: "token",
+ },
+ true,
+ },
{
"just gitlab token set",
map[string]interface{}{
@@ -468,6 +482,13 @@ func TestExecute_ValidateVCSConfig(t *testing.T) {
},
true,
},
+ {
+ "just gitea user set",
+ map[string]interface{}{
+ GiteaUserFlag: "user",
+ },
+ true,
+ },
{
"just github app set",
map[string]interface{}{
@@ -534,6 +555,22 @@ func TestExecute_ValidateVCSConfig(t *testing.T) {
},
true,
},
+ {
+ "github user and gitea token set",
+ map[string]interface{}{
+ GHUserFlag: "user",
+ GiteaTokenFlag: "token",
+ },
+ true,
+ },
+ {
+ "gitea user and github token set",
+ map[string]interface{}{
+ GiteaUserFlag: "user",
+ GHTokenFlag: "token",
+ },
+ true,
+ },
{
"github user and github token set and should be successful",
map[string]interface{}{
@@ -542,6 +579,14 @@ func TestExecute_ValidateVCSConfig(t *testing.T) {
},
false,
},
+ {
+ "gitea user and gitea token set and should be successful",
+ map[string]interface{}{
+ GiteaUserFlag: "user",
+ GiteaTokenFlag: "token",
+ },
+ false,
+ },
{
"github app and key file set and should be successful",
map[string]interface{}{
@@ -587,6 +632,8 @@ func TestExecute_ValidateVCSConfig(t *testing.T) {
map[string]interface{}{
GHUserFlag: "user",
GHTokenFlag: "token",
+ GiteaUserFlag: "user",
+ GiteaTokenFlag: "token",
GitlabUserFlag: "user",
GitlabTokenFlag: "token",
BitbucketUserFlag: "user",
@@ -699,6 +746,19 @@ func TestExecute_GithubApp(t *testing.T) {
Equals(t, int64(1), passedConfig.GithubAppID)
}
+func TestExecute_GiteaUser(t *testing.T) {
+ t.Log("Should remove the @ from the gitea username if it's passed.")
+ c := setup(map[string]interface{}{
+ GiteaUserFlag: "@user",
+ GiteaTokenFlag: "token",
+ RepoAllowlistFlag: "*",
+ }, t)
+ err := c.Execute()
+ Ok(t, err)
+
+ Equals(t, "user", passedConfig.GiteaUser)
+}
+
func TestExecute_GitlabUser(t *testing.T) {
t.Log("Should remove the @ from the gitlab username if it's passed.")
c := setup(map[string]interface{}{
@@ -934,3 +994,45 @@ func configVal(t *testing.T, u server.UserConfig, tag string) interface{} {
t.Fatalf("no field with tag %q found", tag)
return nil
}
+
+// Gitea base URL must have a scheme.
+func TestExecute_GiteaBaseURLScheme(t *testing.T) {
+ c := setup(map[string]interface{}{
+ GiteaUserFlag: "user",
+ GiteaTokenFlag: "token",
+ RepoAllowlistFlag: "*",
+ GiteaBaseURLFlag: "mydomain.com",
+ }, t)
+ ErrEquals(t, "--gitea-base-url must have http:// or https://, got \"mydomain.com\"", c.Execute())
+
+ c = setup(map[string]interface{}{
+ GiteaUserFlag: "user",
+ GiteaTokenFlag: "token",
+ RepoAllowlistFlag: "*",
+ GiteaBaseURLFlag: "://mydomain.com",
+ }, t)
+ ErrEquals(t, "error parsing --gitea-webhook-secret flag value \"://mydomain.com\": parse \"://mydomain.com\": missing protocol scheme", c.Execute())
+}
+
+func TestExecute_GiteaWithWebhookSecret(t *testing.T) {
+ c := setup(map[string]interface{}{
+ GiteaUserFlag: "user",
+ GiteaTokenFlag: "token",
+ RepoAllowlistFlag: "*",
+ GiteaWebhookSecretFlag: "my secret",
+ }, t)
+ err := c.Execute()
+ Ok(t, err)
+}
+
+// Port should be retained on base url.
+func TestExecute_GiteaBaseURLPort(t *testing.T) {
+ c := setup(map[string]interface{}{
+ GiteaUserFlag: "user",
+ GiteaTokenFlag: "token",
+ RepoAllowlistFlag: "*",
+ GiteaBaseURLFlag: "http://mydomain.com:7990",
+ }, t)
+ Ok(t, c.Execute())
+ Equals(t, "http://mydomain.com:7990", passedConfig.GiteaBaseURL)
+}
diff --git a/go.mod b/go.mod
index e3f32b29a0..9b2e7bbeda 100644
--- a/go.mod
+++ b/go.mod
@@ -1,11 +1,12 @@
module github.com/runatlantis/atlantis
-go 1.22.0
+go 1.22.1
require (
+ code.gitea.io/sdk/gitea v0.17.1
github.com/Masterminds/sprig/v3 v3.2.3
- github.com/alicebob/miniredis/v2 v2.31.1
- github.com/bradleyfalzon/ghinstallation/v2 v2.9.0
+ github.com/alicebob/miniredis/v2 v2.32.1
+ github.com/bradleyfalzon/ghinstallation/v2 v2.10.0
github.com/briandowns/spinner v1.23.0
github.com/cactus/go-statsd-client/v5 v5.1.0
github.com/go-ozzo/ozzo-validation v3.6.0+incompatible
@@ -41,8 +42,8 @@ require (
github.com/stretchr/testify v1.9.0
github.com/uber-go/tally/v4 v4.1.10
github.com/urfave/negroni/v3 v3.1.0
- github.com/warrensbox/terraform-switcher v0.1.1-0.20230206012955-d7dfd1b44605
- github.com/xanzy/go-gitlab v0.99.0
+ github.com/warrensbox/terraform-switcher v0.1.1-0.20240401233740-6aaa2c4f4b16
+ github.com/xanzy/go-gitlab v0.100.0
go.etcd.io/bbolt v1.3.9
go.uber.org/zap v1.27.0
golang.org/x/term v0.18.0
@@ -55,7 +56,7 @@ require (
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
- github.com/hashicorp/hcl/v2 v2.20.0
+ github.com/hashicorp/hcl/v2 v2.20.1
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/shurcooL/graphql v0.0.0-20220606043923-3cf50f8a0a29 // indirect
@@ -64,8 +65,6 @@ require (
require github.com/twmb/murmur3 v1.1.8 // indirect
-require github.com/google/go-github/v57 v57.0.0 // indirect
-
require (
github.com/Masterminds/goutils v1.1.1 // indirect
github.com/Masterminds/semver/v3 v3.2.1 // indirect
@@ -77,13 +76,16 @@ require (
github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
+ github.com/davidmz/go-pageant v1.0.2 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/fatih/color v1.15.0 // indirect
github.com/fsnotify/fsnotify v1.7.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.3 // indirect
+ github.com/go-fed/httpsig v1.1.0 // indirect
github.com/golang-jwt/jwt/v4 v4.5.0 // indirect
github.com/golang/protobuf v1.5.3 // indirect
github.com/google/go-cmp v0.6.0 // indirect
+ github.com/google/go-github/v60 v60.0.0 // indirect
github.com/google/go-querystring v1.1.0 // indirect
github.com/gorilla/css v1.0.0 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
@@ -97,7 +99,7 @@ require (
github.com/kr/text v0.2.0 // indirect
github.com/magiconair/properties v1.8.7 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
- github.com/mattn/go-isatty v0.0.19 // indirect
+ github.com/mattn/go-isatty v0.0.20 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect
github.com/mitchellh/go-testing-interface v1.14.1 // indirect
@@ -120,7 +122,7 @@ require (
github.com/spf13/cast v1.6.0 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
github.com/ulikunitz/xz v0.5.11 // indirect
- github.com/yuin/gopher-lua v1.1.0 // indirect
+ github.com/yuin/gopher-lua v1.1.1 // indirect
github.com/zclconf/go-cty v1.13.2 // indirect
go.uber.org/multierr v1.11.0 // indirect
golang.org/x/crypto v0.19.0 // indirect
@@ -132,6 +134,6 @@ require (
golang.org/x/time v0.5.0 // indirect
golang.org/x/tools v0.13.0 // indirect
google.golang.org/appengine v1.6.7 // indirect
- google.golang.org/protobuf v1.31.0 // indirect
+ google.golang.org/protobuf v1.33.0 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
)
diff --git a/go.sum b/go.sum
index df24849fc2..4291836e3d 100644
--- a/go.sum
+++ b/go.sum
@@ -30,10 +30,11 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo
cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
+code.gitea.io/sdk/gitea v0.17.1 h1:3jCPOG2ojbl8AcfaUCRYLT5MUcBMFwS0OSK2mA5Zok8=
+code.gitea.io/sdk/gitea v0.17.1/go.mod h1:aCnBqhHpoEWA180gMbaCtdX9Pl6BWBAuuP2miadoTNM=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
-github.com/DmitriyVTitov/size v1.5.0/go.mod h1:le6rNI4CoLQV1b9gzp1+3d7hMAD/uu2QcJ+aYbNgiU0=
github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
@@ -50,8 +51,8 @@ github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRF
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a h1:HbKu58rmZpUGpz5+4FfNmIU+FmZg2P3Xaj2v2bfNWmk=
github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc=
-github.com/alicebob/miniredis/v2 v2.31.1 h1:7XAt0uUg3DtwEKW5ZAGa+K7FZV2DdKQo5K/6TTnfX8Y=
-github.com/alicebob/miniredis/v2 v2.31.1/go.mod h1:UB/T2Uztp7MlFSDakaX1sTXUv5CASoprx0wulRT6HBg=
+github.com/alicebob/miniredis/v2 v2.32.1 h1:Bz7CciDnYSaa0mX5xODh6GUITRSx+cVhjNoOR4JssBo=
+github.com/alicebob/miniredis/v2 v2.32.1/go.mod h1:AqkLNAfUm0K07J28hnAyyQKf/x0YkCY/g5DCtuL01Mw=
github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw=
github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo=
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
@@ -66,8 +67,8 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d h1:xDfNPAt8lFiC1UJrqV3uuy861HCTo708pDMbjHHdCas=
github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ00z/TKoufEY6K/a0k6AhaJrQKdFe6OfVXsa4=
-github.com/bradleyfalzon/ghinstallation/v2 v2.9.0 h1:HmxIYqnxubRYcYGRc5v3wUekmo5Wv2uX3gukmWJ0AFk=
-github.com/bradleyfalzon/ghinstallation/v2 v2.9.0/go.mod h1:wmkTDJf8CmVypxE8ijIStFnKoTa6solK5QfdmJrP9KI=
+github.com/bradleyfalzon/ghinstallation/v2 v2.10.0 h1:XWuWBRFEpqVrHepQob9yPS3Xg4K3Wr9QCx4fu8HbUNg=
+github.com/bradleyfalzon/ghinstallation/v2 v2.10.0/go.mod h1:qoGA4DxWPaYTgVCrmEspVSjlTu4WYAiSxMIhorMRXXc=
github.com/briandowns/spinner v1.23.0 h1:alDF2guRWqa/FOZZYWjlMIx2L6H0wyewPxo/CH4Pt2A=
github.com/briandowns/spinner v1.23.0/go.mod h1:rPG4gmXeN3wQV/TsAY4w8lPdIM6RX3yqeBQJSrbXjuE=
github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
@@ -93,6 +94,8 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davidmz/go-pageant v1.0.2 h1:bPblRCh5jGU+Uptpz6LgMZGD5hJoOt7otgT454WvHn0=
+github.com/davidmz/go-pageant v1.0.2/go.mod h1:P2EDDnMqIwG5Rrp05dTRITj9z2zpGcD9efWSkTNKLIE=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
@@ -107,6 +110,8 @@ github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nos
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
+github.com/go-fed/httpsig v1.1.0 h1:9M+hb0jkEICD8/cAiNqEB66R87tTINszBRTjwjQzWcI=
+github.com/go-fed/httpsig v1.1.0/go.mod h1:RCMrTZvN1bJYtofsG4rd5NaO5obxQ5xBkdiS7xsT7bM=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
@@ -145,7 +150,6 @@ github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfU
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
-github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
@@ -186,10 +190,10 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
-github.com/google/go-github/v57 v57.0.0 h1:L+Y3UPTY8ALM8x+TV0lg+IEBI+upibemtBD8Q9u7zHs=
-github.com/google/go-github/v57 v57.0.0/go.mod h1:s0omdnye0hvK/ecLvpsGfJMiRt85PimQh4oygmLIxHw=
github.com/google/go-github/v59 v59.0.0 h1:7h6bgpF5as0YQLLkEiVqpgtJqjimMYhBkD4jT5aN3VA=
github.com/google/go-github/v59 v59.0.0/go.mod h1:rJU4R0rQHFVFDOkqGWxfLNo6vEk4dv40oDjhV/gH6wM=
+github.com/google/go-github/v60 v60.0.0 h1:oLG98PsLauFvvu4D/YPxq374jhSxFYdzQGNCyONLfn8=
+github.com/google/go-github/v60 v60.0.0/go.mod h1:ByhX2dP9XT9o/ll2yXAu2VD8l5eNVg8hD4Cr0S/LmQk=
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
@@ -244,8 +248,8 @@ github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
-github.com/hashicorp/hcl/v2 v2.20.0 h1:l++cRs/5jQOiKVvqXZm/P1ZEfVXJmvLS9WSVxkaeTb4=
-github.com/hashicorp/hcl/v2 v2.20.0/go.mod h1:WmcD/Ym72MDOOx5F62Ly+leloeu6H7m0pG7VBiU6pQk=
+github.com/hashicorp/hcl/v2 v2.20.1 h1:M6hgdyz7HYt1UN9e61j+qKJBqR3orTWbI1HKBJEdxtc=
+github.com/hashicorp/hcl/v2 v2.20.1/go.mod h1:TZDqQ4kNKCbh1iJp99FdPiUaVDDUPivbqxZulxDYqL4=
github.com/hashicorp/terraform-config-inspect v0.0.0-20231204233900-a34142ec2a72 h1:nZ5gGjbe5o7XUu1d7j+Y5Ztcxlp+yaumTKH9i0D3wlg=
github.com/hashicorp/terraform-config-inspect v0.0.0-20231204233900-a34142ec2a72/go.mod h1:l8HcFPm9cQh6Q0KSWoYPiePqMvRFenybP1CH2MjKdlg=
github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
@@ -280,8 +284,6 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
-github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
-github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
@@ -289,8 +291,8 @@ github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3v
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
-github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
-github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
+github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo=
github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
@@ -379,8 +381,6 @@ github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6g
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
github.com/samber/lo v1.38.1 h1:j2XEAqXKb09Am4ebOg31SpvzUTTs6EN3VfgeLUhPdXM=
github.com/samber/lo v1.38.1/go.mod h1:+m/ZKRl6ClXCE2Lgf3MsQlWfh4bn1bz6CXEOxnEXnEA=
-github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
-github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8=
github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
@@ -414,6 +414,7 @@ github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXf
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
@@ -430,18 +431,20 @@ github.com/ulikunitz/xz v0.5.11 h1:kpFauv27b6ynzBNT/Xy+1k+fK4WswhN/6PN5WhFAGw8=
github.com/ulikunitz/xz v0.5.11/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/urfave/negroni/v3 v3.1.0 h1:lzmuxGSpnJCT/ujgIAjkU3+LW3NX8alCglO/L6KjIGQ=
github.com/urfave/negroni/v3 v3.1.0/go.mod h1:jWvnX03kcSjDBl/ShB0iHvx5uOs7mAzZXW+JvJ5XYAs=
-github.com/warrensbox/terraform-switcher v0.1.1-0.20230206012955-d7dfd1b44605 h1:bRt3KvPapqnO3s9XenyU4COpU9X7cNW3BMELyHRxuSs=
-github.com/warrensbox/terraform-switcher v0.1.1-0.20230206012955-d7dfd1b44605/go.mod h1:saryXNaL624mlulV138FP+HhVw7IpvETUXLS3nTvH1g=
-github.com/xanzy/go-gitlab v0.99.0 h1:0W5dmFQejPlqnScZoGRXNPmx+evOxBMk50P40cxlnWU=
-github.com/xanzy/go-gitlab v0.99.0/go.mod h1:ETg8tcj4OhrB84UEgeE8dSuV/0h4BBL1uOV/qK0vlyI=
+github.com/warrensbox/terraform-switcher v0.1.1-0.20240401233740-6aaa2c4f4b16 h1:kWefy4KtQjvrDvdpf15ETQ6BM/KiZxH4k2V4udtVFsM=
+github.com/warrensbox/terraform-switcher v0.1.1-0.20240401233740-6aaa2c4f4b16/go.mod h1:YeT46IAQhFHRM6L1+rwZdB2iacYVGIURR+iYH+3hqGs=
+github.com/xanzy/go-gitlab v0.100.0 h1:jaOtYj5nWI19+9oVVmgy233pax2oYqucwetogYU46ks=
+github.com/xanzy/go-gitlab v0.100.0/go.mod h1:ETg8tcj4OhrB84UEgeE8dSuV/0h4BBL1uOV/qK0vlyI=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
-github.com/yuin/gopher-lua v1.1.0 h1:BojcDhfyDWgU2f2TOzYK/g5p2gxMrku8oupLDqlnSqE=
-github.com/yuin/gopher-lua v1.1.0/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw=
+github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M=
+github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw=
github.com/zclconf/go-cty v1.13.2 h1:4GvrUxe/QUDYuJKAav4EYqdM47/kZa672LwmXFmEKT0=
github.com/zclconf/go-cty v1.13.2/go.mod h1:YKQzy/7pZ7iq2jNFzy5go57xdxdWoLLpaEp4u238AE0=
+github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b h1:FosyBZYxY34Wul7O/MSKey3txpPYyCqVO5ZyceuQJEI=
+github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8=
go.etcd.io/bbolt v1.3.9 h1:8x7aARPEXiXbHmtUwAIv7eV2fQFHrLLavdiJ3uzJXoI=
go.etcd.io/bbolt v1.3.9/go.mod h1:zaO32+Ti0PK1ivdPtgMESzuzL2VPoIG1PCQNvOdo/dE=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
@@ -466,8 +469,10 @@ golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200403201458-baeed622b8d8/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
+golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
@@ -503,6 +508,7 @@ golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzB
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -539,6 +545,8 @@ golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su
golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
+golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
+golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@@ -561,6 +569,7 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE=
golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -606,12 +615,18 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
+golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
+golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
+golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8=
golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -622,6 +637,8 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -670,6 +687,7 @@ golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc
golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
+golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -753,8 +771,8 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
-google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8=
-google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
+google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
+google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
diff --git a/package.json b/package.json
index 6874b9f97f..62462ad0c5 100644
--- a/package.json
+++ b/package.json
@@ -1,13 +1,15 @@
{
"license": "Apache-2.0",
"devDependencies": {
- "@vuepress/client": "2.0.0-rc.0",
- "@vuepress/plugin-docsearch": "2.0.0-rc.0",
- "@vuepress/plugin-google-analytics": "2.0.0-rc.15",
- "@vuepress/utils": "2.0.0-rc.0",
- "vue": "^3.3.11",
- "vuepress": "2.0.0-rc.0",
- "vuepress-plugin-sitemap2": "2.0.0-rc.4"
+ "@vuepress/bundler-webpack": "2.0.0-rc.9",
+ "@vuepress/plugin-docsearch": "2.0.0-rc.21",
+ "@vuepress/plugin-google-analytics": "2.0.0-rc.21",
+ "@vuepress/plugin-sitemap": "2.0.0-rc.21",
+ "@vuepress/theme-default": "2.0.0-rc.21",
+ "@vuepress/utils": "2.0.0-rc.9",
+ "sass-loader": "14.1.1",
+ "vuepress": "2.0.0-rc.9",
+ "vue": "3.4.21"
},
"scripts": {
"website:dev": "vuepress dev runatlantis.io",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 5a075817d2..a193b06497 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -5,166 +5,196 @@ settings:
excludeLinksFromLockfile: false
devDependencies:
- '@vuepress/client':
- specifier: 2.0.0-rc.0
- version: 2.0.0-rc.0
+ '@vuepress/bundler-webpack':
+ specifier: 2.0.0-rc.9
+ version: 2.0.0-rc.9
'@vuepress/plugin-docsearch':
- specifier: 2.0.0-rc.0
- version: 2.0.0-rc.0(@algolia/client-search@4.21.1)(search-insights@2.13.0)
+ specifier: 2.0.0-rc.21
+ version: 2.0.0-rc.21(@algolia/client-search@4.23.0)(search-insights@2.13.0)(vuepress@2.0.0-rc.9)
'@vuepress/plugin-google-analytics':
- specifier: 2.0.0-rc.15
- version: 2.0.0-rc.15(vuepress@2.0.0-rc.0)
+ specifier: 2.0.0-rc.21
+ version: 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vuepress/plugin-sitemap':
+ specifier: 2.0.0-rc.21
+ version: 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vuepress/theme-default':
+ specifier: 2.0.0-rc.21
+ version: 2.0.0-rc.21(sass-loader@14.1.1)(vuepress@2.0.0-rc.9)
'@vuepress/utils':
- specifier: 2.0.0-rc.0
- version: 2.0.0-rc.0
+ specifier: 2.0.0-rc.9
+ version: 2.0.0-rc.9
+ sass-loader:
+ specifier: 14.1.1
+ version: 14.1.1(webpack@5.91.0)
vue:
- specifier: ^3.3.11
- version: 3.3.11
+ specifier: 3.4.21
+ version: 3.4.21
vuepress:
- specifier: 2.0.0-rc.0
- version: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11)
- vuepress-plugin-sitemap2:
- specifier: 2.0.0-rc.4
- version: 2.0.0-rc.4(vuepress@2.0.0-rc.0)
+ specifier: 2.0.0-rc.9
+ version: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
packages:
- /@algolia/autocomplete-core@1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)(search-insights@2.13.0):
+ /@algolia/autocomplete-core@1.9.3(@algolia/client-search@4.23.0)(algoliasearch@4.23.0)(search-insights@2.13.0):
resolution: {integrity: sha512-009HdfugtGCdC4JdXUbVJClA0q0zh24yyePn+KUGk3rP7j8FEe/m5Yo/z65gn6nP/cM39PxpzqKrL7A6fP6PPw==}
dependencies:
- '@algolia/autocomplete-plugin-algolia-insights': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)(search-insights@2.13.0)
- '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)
+ '@algolia/autocomplete-plugin-algolia-insights': 1.9.3(@algolia/client-search@4.23.0)(algoliasearch@4.23.0)(search-insights@2.13.0)
+ '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.23.0)(algoliasearch@4.23.0)
transitivePeerDependencies:
- '@algolia/client-search'
- algoliasearch
- search-insights
dev: true
- /@algolia/autocomplete-plugin-algolia-insights@1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)(search-insights@2.13.0):
+ /@algolia/autocomplete-plugin-algolia-insights@1.9.3(@algolia/client-search@4.23.0)(algoliasearch@4.23.0)(search-insights@2.13.0):
resolution: {integrity: sha512-a/yTUkcO/Vyy+JffmAnTWbr4/90cLzw+CC3bRbhnULr/EM0fGNvM13oQQ14f2moLMcVDyAx/leczLlAOovhSZg==}
peerDependencies:
search-insights: '>= 1 < 3'
dependencies:
- '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)
+ '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.23.0)(algoliasearch@4.23.0)
search-insights: 2.13.0
transitivePeerDependencies:
- '@algolia/client-search'
- algoliasearch
dev: true
- /@algolia/autocomplete-preset-algolia@1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1):
+ /@algolia/autocomplete-preset-algolia@1.9.3(@algolia/client-search@4.23.0)(algoliasearch@4.23.0):
resolution: {integrity: sha512-d4qlt6YmrLMYy95n5TB52wtNDr6EgAIPH81dvvvW8UmuWRgxEtY0NJiPwl/h95JtG2vmRM804M0DSwMCNZlzRA==}
peerDependencies:
'@algolia/client-search': '>= 4.9.1 < 6'
algoliasearch: '>= 4.9.1 < 6'
dependencies:
- '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)
- '@algolia/client-search': 4.21.1
- algoliasearch: 4.21.1
+ '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.23.0)(algoliasearch@4.23.0)
+ '@algolia/client-search': 4.23.0
+ algoliasearch: 4.23.0
dev: true
- /@algolia/autocomplete-shared@1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1):
+ /@algolia/autocomplete-shared@1.9.3(@algolia/client-search@4.23.0)(algoliasearch@4.23.0):
resolution: {integrity: sha512-Wnm9E4Ye6Rl6sTTqjoymD+l8DjSTHsHboVRYrKgEt8Q7UHm9nYbqhN/i0fhUYA3OAEH7WA8x3jfpnmJm3rKvaQ==}
peerDependencies:
'@algolia/client-search': '>= 4.9.1 < 6'
algoliasearch: '>= 4.9.1 < 6'
dependencies:
- '@algolia/client-search': 4.21.1
- algoliasearch: 4.21.1
+ '@algolia/client-search': 4.23.0
+ algoliasearch: 4.23.0
dev: true
- /@algolia/cache-browser-local-storage@4.21.1:
- resolution: {integrity: sha512-vUkac/vgj8inyGR/IgunRjTOQ6IlBwl7afFkIfUZRqbqKKXBs+A/g5wgH+UnAlCSW8wjFRAIfCzuvSRb1/qjsQ==}
+ /@algolia/cache-browser-local-storage@4.23.0:
+ resolution: {integrity: sha512-AExxSo/WprsEPny/0whc4Ap2qP2z9C8J8ez7x54tX6s5MV0Rrty0UKDR2pdzS9ou5YvdWqCwNw3c2wkpyOv8/g==}
dependencies:
- '@algolia/cache-common': 4.21.1
+ '@algolia/cache-common': 4.23.0
dev: true
- /@algolia/cache-common@4.21.1:
- resolution: {integrity: sha512-HUo4fRk8KXFMyCASW0k+Kl8iXBoRPdqAjV9OVaFibTNg1dbwnpe6eIxbSTM6AJ2X82ic/8x3GuAO8zF/E515PA==}
+ /@algolia/cache-common@4.23.0:
+ resolution: {integrity: sha512-jz/kZm8Vyp//aympt7OMMVTHw4YD6TskyOxJhtBInqS//CaNA07GiJwL7WFOM422fz9WokbeGaTAFNtLkZP0BQ==}
dev: true
- /@algolia/cache-in-memory@4.21.1:
- resolution: {integrity: sha512-+l2pLg6yIwRaGNtv41pGF/f/e9Qk80FeYE41f4OXS9lb5vpyrxzqM5nUaffWk/ZSFrPDuw5J2E226c//tIIffA==}
+ /@algolia/cache-in-memory@4.23.0:
+ resolution: {integrity: sha512-OujHfXoI4WvH7FswJiNtBkWpqgvxiEHzRYUPunYdim8s4BH461OIv7cDadlnNdAJn9wVsgzN8Ouf4mkuNrnwuA==}
dependencies:
- '@algolia/cache-common': 4.21.1
+ '@algolia/cache-common': 4.23.0
dev: true
- /@algolia/client-account@4.21.1:
- resolution: {integrity: sha512-AC6SjA9n38th73gAUqcjsuxNUChpwaflaAhPL0qO9cUICN67njpQrnYaoSVZ/yx0opG5zQFRKbpEcuPGj0XjhQ==}
+ /@algolia/client-account@4.23.0:
+ resolution: {integrity: sha512-p6IYkjKylUGsiZP1Y6hmjVvTnwKtpTyQScSbjVCGEfDC6858N+ieZrg60ZDGSOl40pGG4VHgCTyiDW1uESsN1A==}
dependencies:
- '@algolia/client-common': 4.21.1
- '@algolia/client-search': 4.21.1
- '@algolia/transporter': 4.21.1
+ '@algolia/client-common': 4.23.0
+ '@algolia/client-search': 4.23.0
+ '@algolia/transporter': 4.23.0
dev: true
- /@algolia/client-analytics@4.21.1:
- resolution: {integrity: sha512-q6AxvAcBl4fNZXZsMwRRQXcsxUv0PK5eUAz/lHDvgkMWAg6cP7Fl+WIq0fHcG7cJA4EHf2sT5fV6Z+yUlf7NfA==}
+ /@algolia/client-analytics@4.23.0:
+ resolution: {integrity: sha512-xt3KaPdJ1vZzB3RCW8iaIE2DE5ijfxYkvjuidcriw4Ac8fTKyLYazxBk6d1ciH4Ye/M3bG1xhmZayBI/FL9P2g==}
dependencies:
- '@algolia/client-common': 4.21.1
- '@algolia/client-search': 4.21.1
- '@algolia/requester-common': 4.21.1
- '@algolia/transporter': 4.21.1
+ '@algolia/client-common': 4.23.0
+ '@algolia/client-search': 4.23.0
+ '@algolia/requester-common': 4.23.0
+ '@algolia/transporter': 4.23.0
dev: true
- /@algolia/client-common@4.21.1:
- resolution: {integrity: sha512-LOH7ncYwY/x7epOgxc/MIuV7m3qzl00wIjDG5/9rgImFpkV0X+D/ndJI9DmPsIx7yaTLd5xv/XYuKLcvrUR0eQ==}
+ /@algolia/client-common@4.23.0:
+ resolution: {integrity: sha512-S+vOOJJzpPHhn5rANDVPf7HEnLNqYf4THU+0oc7zXyR3Wa+2kBzKvn7L0kfEQzvlk7vrhArma1nVOb+zYGpCXA==}
dependencies:
- '@algolia/requester-common': 4.21.1
- '@algolia/transporter': 4.21.1
+ '@algolia/requester-common': 4.23.0
+ '@algolia/transporter': 4.23.0
dev: true
- /@algolia/client-personalization@4.21.1:
- resolution: {integrity: sha512-u2CyQjHbyVwPqM5eSXd/o+rh1Pk949P/MO6s+OxyEGg6/R2YpYvmsafVZl9Q+xqT8pFaf5QygfcqlSdMUDHV5Q==}
+ /@algolia/client-personalization@4.23.0:
+ resolution: {integrity: sha512-J8VGz8irIFl1JXJpI3MqBfChy+whMdB7fjjJ+MM4ieGy2XBer2rDsjdS+mQPtE0ASeqF6e/jil02TKZtGrdcKw==}
dependencies:
- '@algolia/client-common': 4.21.1
- '@algolia/requester-common': 4.21.1
- '@algolia/transporter': 4.21.1
+ '@algolia/client-common': 4.23.0
+ '@algolia/requester-common': 4.23.0
+ '@algolia/transporter': 4.23.0
dev: true
- /@algolia/client-search@4.21.1:
- resolution: {integrity: sha512-3KqSmMkQmF+ACY/Ms5TdcvrcK8iqgQP/N0EPnNUUP4LMUzAACpLLTdzA+AtCuc6oaz5ITtGJBVdPUljj5Jf/Lg==}
+ /@algolia/client-search@4.23.0:
+ resolution: {integrity: sha512-O/ZXOJjQrGV/84fM8C6U0wH5h21iqFC2fMsNX3KmBvoZxFidXoLtng1WasNpYXXi4U9twgGeqJD3HsV/48o08Q==}
dependencies:
- '@algolia/client-common': 4.21.1
- '@algolia/requester-common': 4.21.1
- '@algolia/transporter': 4.21.1
+ '@algolia/client-common': 4.23.0
+ '@algolia/requester-common': 4.23.0
+ '@algolia/transporter': 4.23.0
dev: true
- /@algolia/logger-common@4.21.1:
- resolution: {integrity: sha512-9AyYpR2OO9vPkkDlpTtW2/6nX+RmMd7LUwzJiAF3uN+BYUiQqgXEp+oGaH8UC0dgetmK7wJO6hw4b39cnTdEpw==}
+ /@algolia/logger-common@4.23.0:
+ resolution: {integrity: sha512-SY2GkL99QLfBDUTtgyd9ZOWB/Mz5Yr01q0WewTtlIm5hy02CZN34utIreC1A41/eswLOvJAadQsRZv1qeoMxrw==}
dev: true
- /@algolia/logger-console@4.21.1:
- resolution: {integrity: sha512-9wizQiQ8kL4DiBmT82i403UwacNuv+0hpfsfaWYZQrGjpzG+yvXETWM4AgwFZLj007esuKQiGfOPUoYFZNkGGA==}
+ /@algolia/logger-console@4.23.0:
+ resolution: {integrity: sha512-I0eXOsUiJkjHGjbE3RojH/KCkKfP2ATVrglK1GbYc84oGZ6C3lyrv4hG5o5nahmMSIow7NHYyBBDlzkv7DVVXQ==}
dependencies:
- '@algolia/logger-common': 4.21.1
+ '@algolia/logger-common': 4.23.0
dev: true
- /@algolia/requester-browser-xhr@4.21.1:
- resolution: {integrity: sha512-9NudesJLuXtRHV+JD8fTkrsdVj/oAPQbtLnxBbSQeMduzV6+a7W+G9VuWo5fwFymCdXR8/Hb6jy8D1owQIq5Gw==}
+ /@algolia/recommend@4.23.0:
+ resolution: {integrity: sha512-pnnntx5hUBVLPBXeV4yKEZ4SCNoFbklnjWc2TnUSP5GzyYuqa2n2pF9TBg+/Z9HkspkQdQm8UY142YXIaG2fcA==}
dependencies:
- '@algolia/requester-common': 4.21.1
+ '@algolia/cache-browser-local-storage': 4.23.0
+ '@algolia/cache-common': 4.23.0
+ '@algolia/cache-in-memory': 4.23.0
+ '@algolia/client-common': 4.23.0
+ '@algolia/client-search': 4.23.0
+ '@algolia/logger-common': 4.23.0
+ '@algolia/logger-console': 4.23.0
+ '@algolia/requester-browser-xhr': 4.23.0
+ '@algolia/requester-common': 4.23.0
+ '@algolia/requester-node-http': 4.23.0
+ '@algolia/transporter': 4.23.0
dev: true
- /@algolia/requester-common@4.21.1:
- resolution: {integrity: sha512-KtX2Ep3C43XxoN3xKw755cdf9enE6gPgzh6ufZQRJBl4rYCOoXbiREU6noDYX/Nq+Q+sl03V37WAp0YgtIlh9g==}
+ /@algolia/requester-browser-xhr@4.23.0:
+ resolution: {integrity: sha512-ZxJ6opz4rey1oFLgp+8cBkxIW9uiQ+zSuf9ahqj1JFOPVXBvgcNvXuvHtE+adv7thinE9m3tzp6KD00skt7GQg==}
+ dependencies:
+ '@algolia/requester-common': 4.23.0
+ dev: true
+
+ /@algolia/requester-common@4.23.0:
+ resolution: {integrity: sha512-Ol+vffP7WdMhlHreLmIgilb0pfcu+x9Ylx2iR/o7u2MtVdMrHpsgIjpy7YSCRFTS/zrGc488Y1Y9PsrAi1LB2A==}
+ dev: true
+
+ /@algolia/requester-node-http@4.23.0:
+ resolution: {integrity: sha512-Ae+Gj/LdhbKSRbmsR4w79RrojTikM4iHIdVuyxL1hkG9bZh5YU8grIlL7OPg43+SpaJE6RYa621tEJwV2cKVTA==}
+ dependencies:
+ '@algolia/requester-common': 4.23.0
dev: true
- /@algolia/requester-node-http@4.21.1:
- resolution: {integrity: sha512-EcD8cY6Bh2iMySpqXglTKU9+pt+km1ws3xF0V7CGMIUzW1HmN/ZVhi4apCBY4tEMytbyARv0XRTPsolSC4gSSw==}
+ /@algolia/transporter@4.23.0:
+ resolution: {integrity: sha512-zUOhT9LFSRZHpdbRa59yPglzHhPkO7eVdlU8kcMWZYgZ8lUcofU8jiGNH8FARzkxJSt6ZG4/MqSHKJEK3PYbPA==}
dependencies:
- '@algolia/requester-common': 4.21.1
+ '@algolia/cache-common': 4.23.0
+ '@algolia/logger-common': 4.23.0
+ '@algolia/requester-common': 4.23.0
dev: true
- /@algolia/transporter@4.21.1:
- resolution: {integrity: sha512-KGLFKz8krzOWRwcbR4FT49Grh1dES/mG8dHABEojbvrfUb6kUFxkAee/aezp2GIxuNx+gpQjRn1IzOsqbUZL0A==}
+ /@babel/code-frame@7.24.2:
+ resolution: {integrity: sha512-y5+tLQyV8pg3fsiln67BVLD1P13Eg4lh5RW9mF0zUuvLrv9uIQ4MCL+CRT+FTsBlBjcIan6PGsLcBN0m3ClUyQ==}
+ engines: {node: '>=6.9.0'}
dependencies:
- '@algolia/cache-common': 4.21.1
- '@algolia/logger-common': 4.21.1
- '@algolia/requester-common': 4.21.1
+ '@babel/highlight': 7.24.2
+ picocolors: 1.0.0
dev: true
- /@babel/helper-string-parser@7.23.4:
- resolution: {integrity: sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==}
+ /@babel/helper-string-parser@7.24.1:
+ resolution: {integrity: sha512-2ofRCjnnA9y+wk8b9IAREroeUP02KHp431N2mhKniy2yKIDKpbrHv9eXwm8cBeWQYcJmzv5qKCu65P47eCF7CQ==}
engines: {node: '>=6.9.0'}
dev: true
@@ -173,32 +203,42 @@ packages:
engines: {node: '>=6.9.0'}
dev: true
- /@babel/parser@7.23.6:
- resolution: {integrity: sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ==}
+ /@babel/highlight@7.24.2:
+ resolution: {integrity: sha512-Yac1ao4flkTxTteCDZLEvdxg2fZfz1v8M4QpaGypq/WPDqg3ijHYbDfs+LG5hvzSoqaSZ9/Z9lKSP3CjZjv+pA==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/helper-validator-identifier': 7.22.20
+ chalk: 2.4.2
+ js-tokens: 4.0.0
+ picocolors: 1.0.0
+ dev: true
+
+ /@babel/parser@7.24.1:
+ resolution: {integrity: sha512-Zo9c7N3xdOIQrNip7Lc9wvRPzlRtovHVE4lkz8WEDr7uYh/GMQhSiIgFxGIArRHYdJE5kxtZjAf8rT0xhdLCzg==}
engines: {node: '>=6.0.0'}
hasBin: true
dependencies:
- '@babel/types': 7.23.6
+ '@babel/types': 7.24.0
dev: true
- /@babel/types@7.23.6:
- resolution: {integrity: sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg==}
+ /@babel/types@7.24.0:
+ resolution: {integrity: sha512-+j7a5c253RfKh8iABBhywc8NSfP5LURe7Uh4qpsh6jc+aLJguvmIUBdjSdEMQv2bENrCR5MfRdjGo7vzS/ob7w==}
engines: {node: '>=6.9.0'}
dependencies:
- '@babel/helper-string-parser': 7.23.4
+ '@babel/helper-string-parser': 7.24.1
'@babel/helper-validator-identifier': 7.22.20
to-fast-properties: 2.0.0
dev: true
- /@docsearch/css@3.5.2:
- resolution: {integrity: sha512-SPiDHaWKQZpwR2siD0KQUwlStvIAnEyK6tAE2h2Wuoq8ue9skzhlyVQ1ddzOxX6khULnAALDiR/isSF3bnuciA==}
+ /@docsearch/css@3.6.0:
+ resolution: {integrity: sha512-+sbxb71sWre+PwDK7X2T8+bhS6clcVMLwBPznX45Qu6opJcgRjAp7gYSDzVFp187J+feSj5dNBN1mJoi6ckkUQ==}
dev: true
- /@docsearch/js@3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0):
- resolution: {integrity: sha512-p1YFTCDflk8ieHgFJYfmyHBki1D61+U9idwrLh+GQQMrBSP3DLGKpy0XUJtPjAOPltcVbqsTjiPFfH7JImjUNg==}
+ /@docsearch/js@3.6.0(@algolia/client-search@4.23.0)(search-insights@2.13.0):
+ resolution: {integrity: sha512-QujhqINEElrkIfKwyyyTfbsfMAYCkylInLYMRqHy7PHc8xTBQCow73tlo/Kc7oIwBrCLf0P3YhjlOeV4v8hevQ==}
dependencies:
- '@docsearch/react': 3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0)
- preact: 10.19.3
+ '@docsearch/react': 3.6.0(@algolia/client-search@4.23.0)(search-insights@2.13.0)
+ preact: 10.20.1
transitivePeerDependencies:
- '@algolia/client-search'
- '@types/react'
@@ -207,8 +247,8 @@ packages:
- search-insights
dev: true
- /@docsearch/react@3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0):
- resolution: {integrity: sha512-9Ahcrs5z2jq/DcAvYtvlqEBHImbm4YJI8M9y0x6Tqg598P40HTEkX7hsMcIuThI+hTFxRGZ9hll0Wygm2yEjng==}
+ /@docsearch/react@3.6.0(@algolia/client-search@4.23.0)(search-insights@2.13.0):
+ resolution: {integrity: sha512-HUFut4ztcVNmqy9gp/wxNbC7pTOHhgVVkHVGCACTuLhUKUhKAF9KYHJtMiLUJxEqiFLQiuri1fWF8zqwM/cu1w==}
peerDependencies:
'@types/react': '>= 16.8.0 < 19.0.0'
react: '>= 16.8.0 < 19.0.0'
@@ -224,17 +264,26 @@ packages:
search-insights:
optional: true
dependencies:
- '@algolia/autocomplete-core': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)(search-insights@2.13.0)
- '@algolia/autocomplete-preset-algolia': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)
- '@docsearch/css': 3.5.2
- algoliasearch: 4.21.1
+ '@algolia/autocomplete-core': 1.9.3(@algolia/client-search@4.23.0)(algoliasearch@4.23.0)(search-insights@2.13.0)
+ '@algolia/autocomplete-preset-algolia': 1.9.3(@algolia/client-search@4.23.0)(algoliasearch@4.23.0)
+ '@docsearch/css': 3.6.0
+ algoliasearch: 4.23.0
search-insights: 2.13.0
transitivePeerDependencies:
- '@algolia/client-search'
dev: true
- /@esbuild/android-arm64@0.19.9:
- resolution: {integrity: sha512-q4cR+6ZD0938R19MyEW3jEsMzbb/1rulLXiNAJQADD/XYp7pT+rOS5JGxvpRW8dFDEfjW4wLgC/3FXIw4zYglQ==}
+ /@esbuild/aix-ppc64@0.20.2:
+ resolution: {integrity: sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==}
+ engines: {node: '>=12'}
+ cpu: [ppc64]
+ os: [aix]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /@esbuild/android-arm64@0.20.2:
+ resolution: {integrity: sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==}
engines: {node: '>=12'}
cpu: [arm64]
os: [android]
@@ -242,8 +291,8 @@ packages:
dev: true
optional: true
- /@esbuild/android-arm@0.19.9:
- resolution: {integrity: sha512-jkYjjq7SdsWuNI6b5quymW0oC83NN5FdRPuCbs9HZ02mfVdAP8B8eeqLSYU3gb6OJEaY5CQabtTFbqBf26H3GA==}
+ /@esbuild/android-arm@0.20.2:
+ resolution: {integrity: sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==}
engines: {node: '>=12'}
cpu: [arm]
os: [android]
@@ -251,8 +300,8 @@ packages:
dev: true
optional: true
- /@esbuild/android-x64@0.19.9:
- resolution: {integrity: sha512-KOqoPntWAH6ZxDwx1D6mRntIgZh9KodzgNOy5Ebt9ghzffOk9X2c1sPwtM9P+0eXbefnDhqYfkh5PLP5ULtWFA==}
+ /@esbuild/android-x64@0.20.2:
+ resolution: {integrity: sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==}
engines: {node: '>=12'}
cpu: [x64]
os: [android]
@@ -260,8 +309,8 @@ packages:
dev: true
optional: true
- /@esbuild/darwin-arm64@0.19.9:
- resolution: {integrity: sha512-KBJ9S0AFyLVx2E5D8W0vExqRW01WqRtczUZ8NRu+Pi+87opZn5tL4Y0xT0mA4FtHctd0ZgwNoN639fUUGlNIWw==}
+ /@esbuild/darwin-arm64@0.20.2:
+ resolution: {integrity: sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==}
engines: {node: '>=12'}
cpu: [arm64]
os: [darwin]
@@ -269,8 +318,8 @@ packages:
dev: true
optional: true
- /@esbuild/darwin-x64@0.19.9:
- resolution: {integrity: sha512-vE0VotmNTQaTdX0Q9dOHmMTao6ObjyPm58CHZr1UK7qpNleQyxlFlNCaHsHx6Uqv86VgPmR4o2wdNq3dP1qyDQ==}
+ /@esbuild/darwin-x64@0.20.2:
+ resolution: {integrity: sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==}
engines: {node: '>=12'}
cpu: [x64]
os: [darwin]
@@ -278,8 +327,8 @@ packages:
dev: true
optional: true
- /@esbuild/freebsd-arm64@0.19.9:
- resolution: {integrity: sha512-uFQyd/o1IjiEk3rUHSwUKkqZwqdvuD8GevWF065eqgYfexcVkxh+IJgwTaGZVu59XczZGcN/YMh9uF1fWD8j1g==}
+ /@esbuild/freebsd-arm64@0.20.2:
+ resolution: {integrity: sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==}
engines: {node: '>=12'}
cpu: [arm64]
os: [freebsd]
@@ -287,8 +336,8 @@ packages:
dev: true
optional: true
- /@esbuild/freebsd-x64@0.19.9:
- resolution: {integrity: sha512-WMLgWAtkdTbTu1AWacY7uoj/YtHthgqrqhf1OaEWnZb7PQgpt8eaA/F3LkV0E6K/Lc0cUr/uaVP/49iE4M4asA==}
+ /@esbuild/freebsd-x64@0.20.2:
+ resolution: {integrity: sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==}
engines: {node: '>=12'}
cpu: [x64]
os: [freebsd]
@@ -296,8 +345,8 @@ packages:
dev: true
optional: true
- /@esbuild/linux-arm64@0.19.9:
- resolution: {integrity: sha512-PiPblfe1BjK7WDAKR1Cr9O7VVPqVNpwFcPWgfn4xu0eMemzRp442hXyzF/fSwgrufI66FpHOEJk0yYdPInsmyQ==}
+ /@esbuild/linux-arm64@0.20.2:
+ resolution: {integrity: sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==}
engines: {node: '>=12'}
cpu: [arm64]
os: [linux]
@@ -305,8 +354,8 @@ packages:
dev: true
optional: true
- /@esbuild/linux-arm@0.19.9:
- resolution: {integrity: sha512-C/ChPohUYoyUaqn1h17m/6yt6OB14hbXvT8EgM1ZWaiiTYz7nWZR0SYmMnB5BzQA4GXl3BgBO1l8MYqL/He3qw==}
+ /@esbuild/linux-arm@0.20.2:
+ resolution: {integrity: sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==}
engines: {node: '>=12'}
cpu: [arm]
os: [linux]
@@ -314,8 +363,8 @@ packages:
dev: true
optional: true
- /@esbuild/linux-ia32@0.19.9:
- resolution: {integrity: sha512-f37i/0zE0MjDxijkPSQw1CO/7C27Eojqb+r3BbHVxMLkj8GCa78TrBZzvPyA/FNLUMzP3eyHCVkAopkKVja+6Q==}
+ /@esbuild/linux-ia32@0.20.2:
+ resolution: {integrity: sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==}
engines: {node: '>=12'}
cpu: [ia32]
os: [linux]
@@ -323,8 +372,8 @@ packages:
dev: true
optional: true
- /@esbuild/linux-loong64@0.19.9:
- resolution: {integrity: sha512-t6mN147pUIf3t6wUt3FeumoOTPfmv9Cc6DQlsVBpB7eCpLOqQDyWBP1ymXn1lDw4fNUSb/gBcKAmvTP49oIkaA==}
+ /@esbuild/linux-loong64@0.20.2:
+ resolution: {integrity: sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==}
engines: {node: '>=12'}
cpu: [loong64]
os: [linux]
@@ -332,8 +381,8 @@ packages:
dev: true
optional: true
- /@esbuild/linux-mips64el@0.19.9:
- resolution: {integrity: sha512-jg9fujJTNTQBuDXdmAg1eeJUL4Jds7BklOTkkH80ZgQIoCTdQrDaHYgbFZyeTq8zbY+axgptncko3v9p5hLZtw==}
+ /@esbuild/linux-mips64el@0.20.2:
+ resolution: {integrity: sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==}
engines: {node: '>=12'}
cpu: [mips64el]
os: [linux]
@@ -341,8 +390,8 @@ packages:
dev: true
optional: true
- /@esbuild/linux-ppc64@0.19.9:
- resolution: {integrity: sha512-tkV0xUX0pUUgY4ha7z5BbDS85uI7ABw3V1d0RNTii7E9lbmV8Z37Pup2tsLV46SQWzjOeyDi1Q7Wx2+QM8WaCQ==}
+ /@esbuild/linux-ppc64@0.20.2:
+ resolution: {integrity: sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==}
engines: {node: '>=12'}
cpu: [ppc64]
os: [linux]
@@ -350,8 +399,8 @@ packages:
dev: true
optional: true
- /@esbuild/linux-riscv64@0.19.9:
- resolution: {integrity: sha512-DfLp8dj91cufgPZDXr9p3FoR++m3ZJ6uIXsXrIvJdOjXVREtXuQCjfMfvmc3LScAVmLjcfloyVtpn43D56JFHg==}
+ /@esbuild/linux-riscv64@0.20.2:
+ resolution: {integrity: sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==}
engines: {node: '>=12'}
cpu: [riscv64]
os: [linux]
@@ -359,8 +408,8 @@ packages:
dev: true
optional: true
- /@esbuild/linux-s390x@0.19.9:
- resolution: {integrity: sha512-zHbglfEdC88KMgCWpOl/zc6dDYJvWGLiUtmPRsr1OgCViu3z5GncvNVdf+6/56O2Ca8jUU+t1BW261V6kp8qdw==}
+ /@esbuild/linux-s390x@0.20.2:
+ resolution: {integrity: sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==}
engines: {node: '>=12'}
cpu: [s390x]
os: [linux]
@@ -368,8 +417,8 @@ packages:
dev: true
optional: true
- /@esbuild/linux-x64@0.19.9:
- resolution: {integrity: sha512-JUjpystGFFmNrEHQnIVG8hKwvA2DN5o7RqiO1CVX8EN/F/gkCjkUMgVn6hzScpwnJtl2mPR6I9XV1oW8k9O+0A==}
+ /@esbuild/linux-x64@0.20.2:
+ resolution: {integrity: sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==}
engines: {node: '>=12'}
cpu: [x64]
os: [linux]
@@ -377,8 +426,8 @@ packages:
dev: true
optional: true
- /@esbuild/netbsd-x64@0.19.9:
- resolution: {integrity: sha512-GThgZPAwOBOsheA2RUlW5UeroRfESwMq/guy8uEe3wJlAOjpOXuSevLRd70NZ37ZrpO6RHGHgEHvPg1h3S1Jug==}
+ /@esbuild/netbsd-x64@0.20.2:
+ resolution: {integrity: sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==}
engines: {node: '>=12'}
cpu: [x64]
os: [netbsd]
@@ -386,8 +435,8 @@ packages:
dev: true
optional: true
- /@esbuild/openbsd-x64@0.19.9:
- resolution: {integrity: sha512-Ki6PlzppaFVbLnD8PtlVQfsYw4S9n3eQl87cqgeIw+O3sRr9IghpfSKY62mggdt1yCSZ8QWvTZ9jo9fjDSg9uw==}
+ /@esbuild/openbsd-x64@0.20.2:
+ resolution: {integrity: sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==}
engines: {node: '>=12'}
cpu: [x64]
os: [openbsd]
@@ -395,8 +444,8 @@ packages:
dev: true
optional: true
- /@esbuild/sunos-x64@0.19.9:
- resolution: {integrity: sha512-MLHj7k9hWh4y1ddkBpvRj2b9NCBhfgBt3VpWbHQnXRedVun/hC7sIyTGDGTfsGuXo4ebik2+3ShjcPbhtFwWDw==}
+ /@esbuild/sunos-x64@0.20.2:
+ resolution: {integrity: sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==}
engines: {node: '>=12'}
cpu: [x64]
os: [sunos]
@@ -404,8 +453,8 @@ packages:
dev: true
optional: true
- /@esbuild/win32-arm64@0.19.9:
- resolution: {integrity: sha512-GQoa6OrQ8G08guMFgeXPH7yE/8Dt0IfOGWJSfSH4uafwdC7rWwrfE6P9N8AtPGIjUzdo2+7bN8Xo3qC578olhg==}
+ /@esbuild/win32-arm64@0.20.2:
+ resolution: {integrity: sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==}
engines: {node: '>=12'}
cpu: [arm64]
os: [win32]
@@ -413,8 +462,8 @@ packages:
dev: true
optional: true
- /@esbuild/win32-ia32@0.19.9:
- resolution: {integrity: sha512-UOozV7Ntykvr5tSOlGCrqU3NBr3d8JqPes0QWN2WOXfvkWVGRajC+Ym0/Wj88fUgecUCLDdJPDF0Nna2UK3Qtg==}
+ /@esbuild/win32-ia32@0.20.2:
+ resolution: {integrity: sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==}
engines: {node: '>=12'}
cpu: [ia32]
os: [win32]
@@ -422,8 +471,8 @@ packages:
dev: true
optional: true
- /@esbuild/win32-x64@0.19.9:
- resolution: {integrity: sha512-oxoQgglOP7RH6iasDrhY+R/3cHrfwIDvRlT4CGChflq6twk8iENeVvMJjmvBb94Ik1Z+93iGO27err7w6l54GQ==}
+ /@esbuild/win32-x64@0.20.2:
+ resolution: {integrity: sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==}
engines: {node: '>=12'}
cpu: [x64]
os: [win32]
@@ -431,71 +480,120 @@ packages:
dev: true
optional: true
+ /@isaacs/cliui@8.0.2:
+ resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==}
+ engines: {node: '>=12'}
+ dependencies:
+ string-width: 5.1.2
+ string-width-cjs: /string-width@4.2.3
+ strip-ansi: 7.1.0
+ strip-ansi-cjs: /strip-ansi@6.0.1
+ wrap-ansi: 8.1.0
+ wrap-ansi-cjs: /wrap-ansi@7.0.0
+ dev: true
+
+ /@jridgewell/gen-mapping@0.3.5:
+ resolution: {integrity: sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==}
+ engines: {node: '>=6.0.0'}
+ dependencies:
+ '@jridgewell/set-array': 1.2.1
+ '@jridgewell/sourcemap-codec': 1.4.15
+ '@jridgewell/trace-mapping': 0.3.25
+ dev: true
+
+ /@jridgewell/resolve-uri@3.1.2:
+ resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==}
+ engines: {node: '>=6.0.0'}
+ dev: true
+
+ /@jridgewell/set-array@1.2.1:
+ resolution: {integrity: sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==}
+ engines: {node: '>=6.0.0'}
+ dev: true
+
+ /@jridgewell/source-map@0.3.6:
+ resolution: {integrity: sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ==}
+ dependencies:
+ '@jridgewell/gen-mapping': 0.3.5
+ '@jridgewell/trace-mapping': 0.3.25
+ dev: true
+
/@jridgewell/sourcemap-codec@1.4.15:
resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==}
dev: true
- /@mdit-vue/plugin-component@1.0.0:
- resolution: {integrity: sha512-ZXsJwxkG5yyTHARIYbR74cT4AZ0SfMokFFjiHYCbypHIeYWgJhso4+CZ8+3V9EWFG3EHlGoKNGqKp9chHnqntQ==}
+ /@jridgewell/trace-mapping@0.3.25:
+ resolution: {integrity: sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==}
+ dependencies:
+ '@jridgewell/resolve-uri': 3.1.2
+ '@jridgewell/sourcemap-codec': 1.4.15
+ dev: true
+
+ /@leichtgewicht/ip-codec@2.0.4:
+ resolution: {integrity: sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A==}
+ dev: true
+
+ /@mdit-vue/plugin-component@2.0.0:
+ resolution: {integrity: sha512-cTRxlocav/+mfgDcp0P2z/gWuWBez+iNuN4D+b74LpX4AR6UAx2ZvWtCrUZ8VXrO4eCt1/G0YC/Af7mpIb3aoQ==}
dependencies:
'@types/markdown-it': 13.0.7
- markdown-it: 13.0.2
+ markdown-it: 14.1.0
dev: true
- /@mdit-vue/plugin-frontmatter@1.0.0:
- resolution: {integrity: sha512-MMA7Ny+YPZA7eDOY1t4E+rKuEWO39mzDdP/M68fKdXJU6VfcGkPr7gnpnJfW2QBJ5qIvMrK/3lDAA2JBy5TfpA==}
+ /@mdit-vue/plugin-frontmatter@2.0.0:
+ resolution: {integrity: sha512-/LrT6E60QI4XV4mqx3J87hqYXlR7ZyMvndmftR2RGz7cRAwa/xL+kyFLlgrMxkBIKitOShKa3LS/9Ov9b0fU+g==}
dependencies:
- '@mdit-vue/types': 1.0.0
+ '@mdit-vue/types': 2.0.0
'@types/markdown-it': 13.0.7
gray-matter: 4.0.3
- markdown-it: 13.0.2
+ markdown-it: 14.1.0
dev: true
- /@mdit-vue/plugin-headers@1.0.0:
- resolution: {integrity: sha512-0rK/iKy6x13d/Pp5XxdLBshTD0+YjZvtHIaIV+JO+/H2WnOv7oaRgs48G5d44z3XJVUE2u6fNnTlI169fef0/A==}
+ /@mdit-vue/plugin-headers@2.0.0:
+ resolution: {integrity: sha512-ITMMPCnLEYHHgj3XEUL2l75jsNn8guxNqr26YrMSi1f5zcgq4XVy1LIvfwvJ1puqM6Cc5v4BHk3oAyorAi7l1A==}
dependencies:
- '@mdit-vue/shared': 1.0.0
- '@mdit-vue/types': 1.0.0
+ '@mdit-vue/shared': 2.0.0
+ '@mdit-vue/types': 2.0.0
'@types/markdown-it': 13.0.7
- markdown-it: 13.0.2
+ markdown-it: 14.1.0
dev: true
- /@mdit-vue/plugin-sfc@1.0.0:
- resolution: {integrity: sha512-agMUe0fY4YHxsZivSvplBwRwrFvsIf/JNUJCAYq1+2Sg9+2hviTBZwjZDxYqHDHOVLtiNr+wuo68tE24mAx3AQ==}
+ /@mdit-vue/plugin-sfc@2.0.0:
+ resolution: {integrity: sha512-OXrMXOyk0iwdIou2jRoIHIbjskwghkO14C9/OjgVHXSSX+iM/WQ4l4yi1aWmNlbQNjtP8IXcVAyJB9K0DFYmLg==}
dependencies:
- '@mdit-vue/types': 1.0.0
+ '@mdit-vue/types': 2.0.0
'@types/markdown-it': 13.0.7
- markdown-it: 13.0.2
+ markdown-it: 14.1.0
dev: true
- /@mdit-vue/plugin-title@1.0.0:
- resolution: {integrity: sha512-8yC60fCZ95xcJ/cvJH4Lv43Rs4k+33UGyKrRWj5J8TNyMwUyGcwur0XyPM+ffJH4/Bzq4myZLsj/TTFSkXRxvw==}
+ /@mdit-vue/plugin-title@2.0.0:
+ resolution: {integrity: sha512-eqBoETPVkMXNLvwFshz/A2+Cz81VB5HEkXDm0tt6RBW/rTvnoWmGJ1Z+mvcjR5ck5W4nYdIyT68oHxX2JI2M4g==}
dependencies:
- '@mdit-vue/shared': 1.0.0
- '@mdit-vue/types': 1.0.0
+ '@mdit-vue/shared': 2.0.0
+ '@mdit-vue/types': 2.0.0
'@types/markdown-it': 13.0.7
- markdown-it: 13.0.2
+ markdown-it: 14.1.0
dev: true
- /@mdit-vue/plugin-toc@1.0.0:
- resolution: {integrity: sha512-WN8blfX0X/5Nolic0ClDWP7eVo9IB+U4g0jbycX3lolIZX5Bai1UpsD3QYZr5VVsPbQJMKMGvTrCEtCNTGvyWQ==}
+ /@mdit-vue/plugin-toc@2.0.0:
+ resolution: {integrity: sha512-PKQ8sZna3D5chTnt2lxL+ddpyXd++6Nyc0l8VXCeDgStlySQwiP9jaLeeC88oqY4BtRu4cAmILmxDrvuX0Rrdg==}
dependencies:
- '@mdit-vue/shared': 1.0.0
- '@mdit-vue/types': 1.0.0
+ '@mdit-vue/shared': 2.0.0
+ '@mdit-vue/types': 2.0.0
'@types/markdown-it': 13.0.7
- markdown-it: 13.0.2
+ markdown-it: 14.1.0
dev: true
- /@mdit-vue/shared@1.0.0:
- resolution: {integrity: sha512-nbYBfmEi+pR2Lm0Z6TMVX2/iBjfr/kGEsHW8CC0rQw+3+sG5dY6VG094HuFAkiAmmvZx9DZZb+7ZMWp9vkwCRw==}
+ /@mdit-vue/shared@2.0.0:
+ resolution: {integrity: sha512-PdxpQpbyTazeo2JT87qms6RPZIzyJd+gwuB+1jSwLDI7+0u5g79y2XgTAbZromSVgY2f3UU5HWdwaLbV9w4uOw==}
dependencies:
- '@mdit-vue/types': 1.0.0
+ '@mdit-vue/types': 2.0.0
'@types/markdown-it': 13.0.7
- markdown-it: 13.0.2
+ markdown-it: 14.1.0
dev: true
- /@mdit-vue/types@1.0.0:
- resolution: {integrity: sha512-xeF5+sHLzRNF7plbksywKCph4qli20l72of2fMlZQQ7RECvXYrRkE9+bjRFQCyULC7B8ydUYbpbkux5xJlVWyw==}
+ /@mdit-vue/types@2.0.0:
+ resolution: {integrity: sha512-1BeEB+DbtmDMUAfvbNUj5Hso8cSl2sBVK2iTyOMAqhfDVLdh+/9+D0JmQHaCeUk/vuJoMhOwbweZvh55wHxm4w==}
dev: true
/@nodelib/fs.scandir@2.1.5:
@@ -516,139 +614,122 @@ packages:
engines: {node: '>= 8'}
dependencies:
'@nodelib/fs.scandir': 2.1.5
- fastq: 1.15.0
- dev: true
-
- /@rollup/rollup-android-arm-eabi@4.8.0:
- resolution: {integrity: sha512-zdTObFRoNENrdPpnTNnhOljYIcOX7aI7+7wyrSpPFFIOf/nRdedE6IYsjaBE7tjukphh1tMTojgJ7p3lKY8x6Q==}
- cpu: [arm]
- os: [android]
- requiresBuild: true
- dev: true
- optional: true
-
- /@rollup/rollup-android-arm64@4.8.0:
- resolution: {integrity: sha512-aiItwP48BiGpMFS9Znjo/xCNQVwTQVcRKkFKsO81m8exrGjHkCBDvm9PHay2kpa8RPnZzzKcD1iQ9KaLY4fPQQ==}
- cpu: [arm64]
- os: [android]
- requiresBuild: true
+ fastq: 1.17.1
dev: true
- optional: true
-
- /@rollup/rollup-darwin-arm64@4.8.0:
- resolution: {integrity: sha512-zhNIS+L4ZYkYQUjIQUR6Zl0RXhbbA0huvNIWjmPc2SL0cB1h5Djkcy+RZ3/Bwszfb6vgwUvcVJYD6e6Zkpsi8g==}
- cpu: [arm64]
- os: [darwin]
- requiresBuild: true
- dev: true
- optional: true
- /@rollup/rollup-darwin-x64@4.8.0:
- resolution: {integrity: sha512-A/FAHFRNQYrELrb/JHncRWzTTXB2ticiRFztP4ggIUAfa9Up1qfW8aG2w/mN9jNiZ+HB0t0u0jpJgFXG6BfRTA==}
- cpu: [x64]
- os: [darwin]
+ /@pkgjs/parseargs@0.11.0:
+ resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==}
+ engines: {node: '>=14'}
requiresBuild: true
dev: true
optional: true
- /@rollup/rollup-linux-arm-gnueabihf@4.8.0:
- resolution: {integrity: sha512-JsidBnh3p2IJJA4/2xOF2puAYqbaczB3elZDT0qHxn362EIoIkq7hrR43Xa8RisgI6/WPfvb2umbGsuvf7E37A==}
- cpu: [arm]
- os: [linux]
- requiresBuild: true
+ /@sindresorhus/merge-streams@2.3.0:
+ resolution: {integrity: sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==}
+ engines: {node: '>=18'}
dev: true
- optional: true
- /@rollup/rollup-linux-arm64-gnu@4.8.0:
- resolution: {integrity: sha512-hBNCnqw3EVCkaPB0Oqd24bv8SklETptQWcJz06kb9OtiShn9jK1VuTgi7o4zPSt6rNGWQOTDEAccbk0OqJmS+g==}
- cpu: [arm64]
- os: [linux]
- requiresBuild: true
+ /@types/body-parser@1.19.5:
+ resolution: {integrity: sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==}
+ dependencies:
+ '@types/connect': 3.4.38
+ '@types/node': 20.11.30
dev: true
- optional: true
- /@rollup/rollup-linux-arm64-musl@4.8.0:
- resolution: {integrity: sha512-Fw9ChYfJPdltvi9ALJ9wzdCdxGw4wtq4t1qY028b2O7GwB5qLNSGtqMsAel1lfWTZvf4b6/+4HKp0GlSYg0ahA==}
- cpu: [arm64]
- os: [linux]
- requiresBuild: true
+ /@types/bonjour@3.5.13:
+ resolution: {integrity: sha512-z9fJ5Im06zvUL548KvYNecEVlA7cVDkGUi6kZusb04mpyEFKCIZJvloCcmpmLaIahDpOQGHaHmG6imtPMmPXGQ==}
+ dependencies:
+ '@types/node': 20.11.30
dev: true
- optional: true
- /@rollup/rollup-linux-riscv64-gnu@4.8.0:
- resolution: {integrity: sha512-BH5xIh7tOzS9yBi8dFrCTG8Z6iNIGWGltd3IpTSKp6+pNWWO6qy8eKoRxOtwFbMrid5NZaidLYN6rHh9aB8bEw==}
- cpu: [riscv64]
- os: [linux]
- requiresBuild: true
+ /@types/connect-history-api-fallback@1.5.4:
+ resolution: {integrity: sha512-n6Cr2xS1h4uAulPRdlw6Jl6s1oG8KrVilPN2yUITEs+K48EzMJJ3W1xy8K5eWuFvjp3R74AOIGSmp2UfBJ8HFw==}
+ dependencies:
+ '@types/express-serve-static-core': 4.17.43
+ '@types/node': 20.11.30
dev: true
- optional: true
- /@rollup/rollup-linux-x64-gnu@4.8.0:
- resolution: {integrity: sha512-PmvAj8k6EuWiyLbkNpd6BLv5XeYFpqWuRvRNRl80xVfpGXK/z6KYXmAgbI4ogz7uFiJxCnYcqyvZVD0dgFog7Q==}
- cpu: [x64]
- os: [linux]
- requiresBuild: true
+ /@types/connect@3.4.38:
+ resolution: {integrity: sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==}
+ dependencies:
+ '@types/node': 20.11.30
dev: true
- optional: true
- /@rollup/rollup-linux-x64-musl@4.8.0:
- resolution: {integrity: sha512-mdxnlW2QUzXwY+95TuxZ+CurrhgrPAMveDWI97EQlA9bfhR8tw3Pt7SUlc/eSlCNxlWktpmT//EAA8UfCHOyXg==}
- cpu: [x64]
- os: [linux]
- requiresBuild: true
+ /@types/debug@4.1.12:
+ resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==}
+ dependencies:
+ '@types/ms': 0.7.34
dev: true
- optional: true
- /@rollup/rollup-win32-arm64-msvc@4.8.0:
- resolution: {integrity: sha512-ge7saUz38aesM4MA7Cad8CHo0Fyd1+qTaqoIo+Jtk+ipBi4ATSrHWov9/S4u5pbEQmLjgUjB7BJt+MiKG2kzmA==}
- cpu: [arm64]
- os: [win32]
- requiresBuild: true
+ /@types/eslint-scope@3.7.7:
+ resolution: {integrity: sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==}
+ dependencies:
+ '@types/eslint': 8.56.6
+ '@types/estree': 1.0.5
dev: true
- optional: true
- /@rollup/rollup-win32-ia32-msvc@4.8.0:
- resolution: {integrity: sha512-p9E3PZlzurhlsN5h9g7zIP1DnqKXJe8ZUkFwAazqSvHuWfihlIISPxG9hCHCoA+dOOspL/c7ty1eeEVFTE0UTw==}
- cpu: [ia32]
- os: [win32]
- requiresBuild: true
+ /@types/eslint@8.56.6:
+ resolution: {integrity: sha512-ymwc+qb1XkjT/gfoQwxIeHZ6ixH23A+tCT2ADSA/DPVKzAjwYkTXBMCQ/f6fe4wEa85Lhp26VPeUxI7wMhAi7A==}
+ dependencies:
+ '@types/estree': 1.0.5
+ '@types/json-schema': 7.0.15
dev: true
- optional: true
- /@rollup/rollup-win32-x64-msvc@4.8.0:
- resolution: {integrity: sha512-kb4/auKXkYKqlUYTE8s40FcJIj5soOyRLHKd4ugR0dCq0G2EfcF54eYcfQiGkHzjidZ40daB4ulsFdtqNKZtBg==}
- cpu: [x64]
- os: [win32]
- requiresBuild: true
+ /@types/estree@1.0.5:
+ resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==}
dev: true
- optional: true
- /@sindresorhus/merge-streams@1.0.0:
- resolution: {integrity: sha512-rUV5WyJrJLoloD4NDN1V1+LDMDWOa4OTsT4yYJwQNpTU6FWxkxHpL7eu4w+DmiH8x/EAM1otkPE1+LaspIbplw==}
- engines: {node: '>=18'}
+ /@types/express-serve-static-core@4.17.43:
+ resolution: {integrity: sha512-oaYtiBirUOPQGSWNGPWnzyAFJ0BP3cwvN4oWZQY+zUBwpVIGsKUkpBpSztp74drYcjavs7SKFZ4DX1V2QeN8rg==}
+ dependencies:
+ '@types/node': 20.11.30
+ '@types/qs': 6.9.14
+ '@types/range-parser': 1.2.7
+ '@types/send': 0.17.4
dev: true
- /@types/debug@4.1.12:
- resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==}
+ /@types/express@4.17.21:
+ resolution: {integrity: sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==}
dependencies:
- '@types/ms': 0.7.34
+ '@types/body-parser': 1.19.5
+ '@types/express-serve-static-core': 4.17.43
+ '@types/qs': 6.9.14
+ '@types/serve-static': 1.15.5
dev: true
/@types/fs-extra@11.0.4:
resolution: {integrity: sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==}
dependencies:
'@types/jsonfile': 6.1.4
- '@types/node': 20.10.4
+ '@types/node': 20.11.30
dev: true
/@types/hash-sum@1.0.2:
resolution: {integrity: sha512-UP28RddqY8xcU0SCEp9YKutQICXpaAq9N8U2klqF5hegGha7KzTOL8EdhIIV3bOSGBzjEpN9bU/d+nNZBdJYVw==}
dev: true
+ /@types/html-minifier-terser@6.1.0:
+ resolution: {integrity: sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg==}
+ dev: true
+
+ /@types/http-errors@2.0.4:
+ resolution: {integrity: sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==}
+ dev: true
+
+ /@types/http-proxy@1.17.14:
+ resolution: {integrity: sha512-SSrD0c1OQzlFX7pGu1eXxSEjemej64aaNPRhhVYUGqXh0BtldAAx37MG8btcumvpgKyZp1F5Gn3JkktdxiFv6w==}
+ dependencies:
+ '@types/node': 20.11.30
+ dev: true
+
+ /@types/json-schema@7.0.15:
+ resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==}
+ dev: true
+
/@types/jsonfile@6.1.4:
resolution: {integrity: sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==}
dependencies:
- '@types/node': 20.10.4
+ '@types/node': 20.11.30
dev: true
/@types/linkify-it@3.0.5:
@@ -672,284 +753,354 @@ packages:
resolution: {integrity: sha512-6L6VymKTzYSrEf4Nev4Xa1LCHKrlTlYCBMTlQKFuddo1CvQcE52I0mwfOJayueUC7MJuXOeHTcIU683lzd0cUA==}
dev: true
+ /@types/mime@1.3.5:
+ resolution: {integrity: sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==}
+ dev: true
+
+ /@types/mime@3.0.4:
+ resolution: {integrity: sha512-iJt33IQnVRkqeqC7PzBHPTC6fDlRNRW8vjrgqtScAhrmMwe8c4Eo7+fUGTa+XdWrpEgpyKWMYmi2dIwMAYRzPw==}
+ dev: true
+
/@types/ms@0.7.34:
resolution: {integrity: sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==}
dev: true
+ /@types/node-forge@1.3.11:
+ resolution: {integrity: sha512-FQx220y22OKNTqaByeBGqHWYz4cl94tpcxeFdvBo3wjG6XPBuZ0BNgNZRV5J5TFmmcsJ4IzsLkmGRiQbnYsBEQ==}
+ dependencies:
+ '@types/node': 20.11.30
+ dev: true
+
/@types/node@17.0.45:
resolution: {integrity: sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==}
dev: true
- /@types/node@20.10.4:
- resolution: {integrity: sha512-D08YG6rr8X90YB56tSIuBaddy/UXAA9RKJoFvrsnogAum/0pmjkgi4+2nx96A330FmioegBWmEYQ+syqCFaveg==}
+ /@types/node@20.11.30:
+ resolution: {integrity: sha512-dHM6ZxwlmuZaRmUPfv1p+KrdD1Dci04FbdEm/9wEMouFqxYoFl5aMkt0VMAUtYRQDyYvD41WJLukhq/ha3YuTw==}
dependencies:
undici-types: 5.26.5
dev: true
+ /@types/qs@6.9.14:
+ resolution: {integrity: sha512-5khscbd3SwWMhFqylJBLQ0zIu7c1K6Vz0uBIt915BI3zV0q1nfjRQD3RqSBcPaO6PHEF4ov/t9y89fSiyThlPA==}
+ dev: true
+
+ /@types/range-parser@1.2.7:
+ resolution: {integrity: sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==}
+ dev: true
+
+ /@types/retry@0.12.2:
+ resolution: {integrity: sha512-XISRgDJ2Tc5q4TRqvgJtzsRkFYNJzZrhTdtMoGVBttwzzQJkPnS3WWTFc7kuDRoPtPakl+T+OfdEUjYJj7Jbow==}
+ dev: true
+
/@types/sax@1.2.7:
resolution: {integrity: sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A==}
dependencies:
- '@types/node': 20.10.4
+ '@types/node': 20.11.30
dev: true
- /@types/web-bluetooth@0.0.20:
- resolution: {integrity: sha512-g9gZnnXVq7gM7v3tJCWV/qw7w+KeOlSHAhgF9RytFyifW6AF61hdT2ucrYhPq9hLs5JIryeupHV3qGk95dH9ow==}
+ /@types/send@0.17.4:
+ resolution: {integrity: sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==}
+ dependencies:
+ '@types/mime': 1.3.5
+ '@types/node': 20.11.30
dev: true
- /@vitejs/plugin-vue@4.5.2(vite@5.0.12)(vue@3.3.11):
- resolution: {integrity: sha512-UGR3DlzLi/SaVBPX0cnSyE37vqxU3O6chn8l0HJNzQzDia6/Au2A4xKv+iIJW8w2daf80G7TYHhi1pAUjdZ0bQ==}
- engines: {node: ^14.18.0 || >=16.0.0}
- peerDependencies:
- vite: ^4.0.0 || ^5.0.0
- vue: ^3.2.25
+ /@types/serve-index@1.9.4:
+ resolution: {integrity: sha512-qLpGZ/c2fhSs5gnYsQxtDEq3Oy8SXPClIXkW5ghvAvsNuVSA8k+gCONcUCS/UjLEYvYps+e8uBtfgXgvhwfNug==}
dependencies:
- vite: 5.0.12
- vue: 3.3.11
+ '@types/express': 4.17.21
dev: true
- /@vue/compiler-core@3.3.11:
- resolution: {integrity: sha512-h97/TGWBilnLuRaj58sxNrsUU66fwdRKLOLQ9N/5iNDfp+DZhYH9Obhe0bXxhedl8fjAgpRANpiZfbgWyruQ0w==}
+ /@types/serve-static@1.15.5:
+ resolution: {integrity: sha512-PDRk21MnK70hja/YF8AHfC7yIsiQHn1rcXx7ijCFBX/k+XQJhQT/gw3xekXKJvx+5SXaMMS8oqQy09Mzvz2TuQ==}
dependencies:
- '@babel/parser': 7.23.6
- '@vue/shared': 3.3.11
- estree-walker: 2.0.2
- source-map-js: 1.0.2
+ '@types/http-errors': 2.0.4
+ '@types/mime': 3.0.4
+ '@types/node': 20.11.30
dev: true
- /@vue/compiler-dom@3.3.11:
- resolution: {integrity: sha512-zoAiUIqSKqAJ81WhfPXYmFGwDRuO+loqLxvXmfUdR5fOitPoUiIeFI9cTTyv9MU5O1+ZZglJVTusWzy+wfk5hw==}
+ /@types/sockjs@0.3.36:
+ resolution: {integrity: sha512-MK9V6NzAS1+Ud7JV9lJLFqW85VbC9dq3LmwZCuBe4wBDgKC0Kj/jd8Xl+nSviU+Qc3+m7umHHyHg//2KSa0a0Q==}
dependencies:
- '@vue/compiler-core': 3.3.11
- '@vue/shared': 3.3.11
+ '@types/node': 20.11.30
dev: true
- /@vue/compiler-sfc@3.3.11:
- resolution: {integrity: sha512-U4iqPlHO0KQeK1mrsxCN0vZzw43/lL8POxgpzcJweopmqtoYy9nljJzWDIQS3EfjiYhfdtdk9Gtgz7MRXnz3GA==}
- dependencies:
- '@babel/parser': 7.23.6
- '@vue/compiler-core': 3.3.11
- '@vue/compiler-dom': 3.3.11
- '@vue/compiler-ssr': 3.3.11
- '@vue/reactivity-transform': 3.3.11
- '@vue/shared': 3.3.11
- estree-walker: 2.0.2
- magic-string: 0.30.5
- postcss: 8.4.32
- source-map-js: 1.0.2
+ /@types/web-bluetooth@0.0.20:
+ resolution: {integrity: sha512-g9gZnnXVq7gM7v3tJCWV/qw7w+KeOlSHAhgF9RytFyifW6AF61hdT2ucrYhPq9hLs5JIryeupHV3qGk95dH9ow==}
dev: true
- /@vue/compiler-ssr@3.3.11:
- resolution: {integrity: sha512-Zd66ZwMvndxRTgVPdo+muV4Rv9n9DwQ4SSgWWKWkPFebHQfVYRrVjeygmmDmPewsHyznCNvJ2P2d6iOOhdv8Qg==}
- dependencies:
- '@vue/compiler-dom': 3.3.11
- '@vue/shared': 3.3.11
+ /@types/webpack-env@1.18.4:
+ resolution: {integrity: sha512-I6e+9+HtWADAWeeJWDFQtdk4EVSAbj6Rtz4q8fJ7mSr1M0jzlFcs8/HZ+Xb5SHzVm1dxH7aUiI+A8kA8Gcrm0A==}
dev: true
- /@vue/devtools-api@6.5.1:
- resolution: {integrity: sha512-+KpckaAQyfbvshdDW5xQylLni1asvNSGme1JFs8I1+/H5pHEhqUKMEQD/qn3Nx5+/nycBq11qAEi8lk+LXI2dA==}
+ /@types/ws@8.5.10:
+ resolution: {integrity: sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==}
+ dependencies:
+ '@types/node': 20.11.30
dev: true
- /@vue/reactivity-transform@3.3.11:
- resolution: {integrity: sha512-fPGjH0wqJo68A0wQ1k158utDq/cRyZNlFoxGwNScE28aUFOKFEnCBsvyD8jHn+0kd0UKVpuGuaZEQ6r9FJRqCg==}
+ /@vue/compiler-core@3.4.21:
+ resolution: {integrity: sha512-MjXawxZf2SbZszLPYxaFCjxfibYrzr3eYbKxwpLR9EQN+oaziSu3qKVbwBERj1IFIB8OLUewxB5m/BFzi613og==}
dependencies:
- '@babel/parser': 7.23.6
- '@vue/compiler-core': 3.3.11
- '@vue/shared': 3.3.11
+ '@babel/parser': 7.24.1
+ '@vue/shared': 3.4.21
+ entities: 4.5.0
estree-walker: 2.0.2
- magic-string: 0.30.5
+ source-map-js: 1.2.0
dev: true
- /@vue/reactivity@3.3.11:
- resolution: {integrity: sha512-D5tcw091f0nuu+hXq5XANofD0OXnBmaRqMYl5B3fCR+mX+cXJIGNw/VNawBqkjLNWETrFW0i+xH9NvDbTPVh7g==}
+ /@vue/compiler-dom@3.4.21:
+ resolution: {integrity: sha512-IZC6FKowtT1sl0CR5DpXSiEB5ayw75oT2bma1BEhV7RRR1+cfwLrxc2Z8Zq/RGFzJ8w5r9QtCOvTjQgdn0IKmA==}
dependencies:
- '@vue/shared': 3.3.11
+ '@vue/compiler-core': 3.4.21
+ '@vue/shared': 3.4.21
dev: true
- /@vue/runtime-core@3.3.11:
- resolution: {integrity: sha512-g9ztHGwEbS5RyWaOpXuyIVFTschclnwhqEbdy5AwGhYOgc7m/q3NFwr50MirZwTTzX55JY8pSkeib9BX04NIpw==}
+ /@vue/compiler-sfc@3.4.21:
+ resolution: {integrity: sha512-me7epoTxYlY+2CUM7hy9PCDdpMPfIwrOvAXud2Upk10g4YLv9UBW7kL798TvMeDhPthkZ0CONNrK2GoeI1ODiQ==}
dependencies:
- '@vue/reactivity': 3.3.11
- '@vue/shared': 3.3.11
+ '@babel/parser': 7.24.1
+ '@vue/compiler-core': 3.4.21
+ '@vue/compiler-dom': 3.4.21
+ '@vue/compiler-ssr': 3.4.21
+ '@vue/shared': 3.4.21
+ estree-walker: 2.0.2
+ magic-string: 0.30.8
+ postcss: 8.4.38
+ source-map-js: 1.2.0
dev: true
- /@vue/runtime-dom@3.3.11:
- resolution: {integrity: sha512-OlhtV1PVpbgk+I2zl+Y5rQtDNcCDs12rsRg71XwaA2/Rbllw6mBLMi57VOn8G0AjOJ4Mdb4k56V37+g8ukShpQ==}
+ /@vue/compiler-ssr@3.4.21:
+ resolution: {integrity: sha512-M5+9nI2lPpAsgXOGQobnIueVqc9sisBFexh5yMIMRAPYLa7+5wEJs8iqOZc1WAa9WQbx9GR2twgznU8LTIiZ4Q==}
dependencies:
- '@vue/runtime-core': 3.3.11
- '@vue/shared': 3.3.11
- csstype: 3.1.3
+ '@vue/compiler-dom': 3.4.21
+ '@vue/shared': 3.4.21
dev: true
- /@vue/server-renderer@3.3.11(vue@3.3.11):
- resolution: {integrity: sha512-AIWk0VwwxCAm4wqtJyxBylRTXSy1wCLOKbWxHaHiu14wjsNYtiRCSgVuqEPVuDpErOlRdNnuRgipQfXRLjLN5A==}
- peerDependencies:
- vue: 3.3.11
+ /@vue/devtools-api@6.6.1:
+ resolution: {integrity: sha512-LgPscpE3Vs0x96PzSSB4IGVSZXZBZHpfxs+ZA1d+VEPwHdOXowy/Y2CsvCAIFrf+ssVU1pD1jidj505EpUnfbA==}
+ dev: true
+
+ /@vue/reactivity@3.4.21:
+ resolution: {integrity: sha512-UhenImdc0L0/4ahGCyEzc/pZNwVgcglGy9HVzJ1Bq2Mm9qXOpP8RyNTjookw/gOCUlXSEtuZ2fUg5nrHcoqJcw==}
dependencies:
- '@vue/compiler-ssr': 3.3.11
- '@vue/shared': 3.3.11
- vue: 3.3.11
+ '@vue/shared': 3.4.21
dev: true
- /@vue/shared@3.3.11:
- resolution: {integrity: sha512-u2G8ZQ9IhMWTMXaWqZycnK4UthG1fA238CD+DP4Dm4WJi5hdUKKLg0RMRaRpDPNMdkTwIDkp7WtD0Rd9BH9fLw==}
+ /@vue/runtime-core@3.4.21:
+ resolution: {integrity: sha512-pQthsuYzE1XcGZznTKn73G0s14eCJcjaLvp3/DKeYWoFacD9glJoqlNBxt3W2c5S40t6CCcpPf+jG01N3ULyrA==}
+ dependencies:
+ '@vue/reactivity': 3.4.21
+ '@vue/shared': 3.4.21
dev: true
- /@vuepress/bundler-vite@2.0.0-rc.0:
- resolution: {integrity: sha512-rX8S8IYpqqlJfNPstS/joorpxXx/4WuE7+gDM31i2HUrxOKGZVzq8ZsRRRU2UdoTwHZSd3LpUS4sMtxE5xLK1A==}
+ /@vue/runtime-dom@3.4.21:
+ resolution: {integrity: sha512-gvf+C9cFpevsQxbkRBS1NpU8CqxKw0ebqMvLwcGQrNpx6gqRDodqKqA+A2VZZpQ9RpK2f9yfg8VbW/EpdFUOJw==}
dependencies:
- '@vitejs/plugin-vue': 4.5.2(vite@5.0.12)(vue@3.3.11)
- '@vuepress/client': 2.0.0-rc.0
- '@vuepress/core': 2.0.0-rc.0
- '@vuepress/shared': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
- autoprefixer: 10.4.16(postcss@8.4.32)
- connect-history-api-fallback: 2.0.0
- postcss: 8.4.32
- postcss-load-config: 4.0.2(postcss@8.4.32)
- rollup: 4.8.0
- vite: 5.0.12
- vue: 3.3.11
- vue-router: 4.2.5(vue@3.3.11)
+ '@vue/runtime-core': 3.4.21
+ '@vue/shared': 3.4.21
+ csstype: 3.1.3
+ dev: true
+
+ /@vue/server-renderer@3.4.21(vue@3.4.21):
+ resolution: {integrity: sha512-aV1gXyKSN6Rz+6kZ6kr5+Ll14YzmIbeuWe7ryJl5muJ4uwSwY/aStXTixx76TwkZFJLm1aAlA/HSWEJ4EyiMkg==}
+ peerDependencies:
+ vue: 3.4.21
+ dependencies:
+ '@vue/compiler-ssr': 3.4.21
+ '@vue/shared': 3.4.21
+ vue: 3.4.21
+ dev: true
+
+ /@vue/shared@3.4.21:
+ resolution: {integrity: sha512-PuJe7vDIi6VYSinuEbUIQgMIRZGgM8e4R+G+/dQTk0X1NEdvgvvgv7m+rfmDH1gZzyA1OjjoWskvHlfRNfQf3g==}
+ dev: true
+
+ /@vuepress/bundler-webpack@2.0.0-rc.9:
+ resolution: {integrity: sha512-Vf1QPzmB3hCzOYkCkjMfVsuo3TBNx5E0cGwFeiJGqH/f/+W6eOx0OYn8JrubZaByw+g2OSwbdM0tjzCXS2yHHA==}
+ dependencies:
+ '@types/express': 4.17.21
+ '@types/webpack-env': 1.18.4
+ '@vuepress/client': 2.0.0-rc.9
+ '@vuepress/core': 2.0.0-rc.9
+ '@vuepress/shared': 2.0.0-rc.9
+ '@vuepress/utils': 2.0.0-rc.9
+ autoprefixer: 10.4.19(postcss@8.4.38)
+ chokidar: 3.6.0
+ copy-webpack-plugin: 12.0.2(webpack@5.91.0)
+ css-loader: 6.10.0(webpack@5.91.0)
+ esbuild-loader: 4.1.0(webpack@5.91.0)
+ express: 4.19.2
+ html-webpack-plugin: 5.6.0(webpack@5.91.0)
+ mini-css-extract-plugin: 2.8.1(webpack@5.91.0)
+ postcss: 8.4.38
+ postcss-csso: 6.0.1(postcss@8.4.38)
+ postcss-loader: 8.1.1(postcss@8.4.38)(webpack@5.91.0)
+ style-loader: 3.3.4(webpack@5.91.0)
+ vue: 3.4.21
+ vue-loader: 17.4.2(vue@3.4.21)(webpack@5.91.0)
+ vue-router: 4.3.0(vue@3.4.21)
+ webpack: 5.91.0
+ webpack-chain: 6.5.1
+ webpack-dev-server: 5.0.4(webpack@5.91.0)
+ webpack-merge: 5.10.0
transitivePeerDependencies:
- - '@types/node'
- - '@vue/composition-api'
- - less
- - lightningcss
- - sass
- - stylus
- - sugarss
+ - '@rspack/core'
+ - '@swc/core'
+ - '@vue/compiler-sfc'
+ - bufferutil
+ - debug
+ - esbuild
- supports-color
- - terser
- - ts-node
- typescript
+ - uglify-js
+ - utf-8-validate
+ - webpack-cli
dev: true
- /@vuepress/cli@2.0.0-rc.0:
- resolution: {integrity: sha512-XWSIFO9iOR7N4O2lXIwS5vZuLjU9WU/aGAtmhMWEMxrdMx7TQaJbgrfpTUEbHMf+cPI1DXBbUbtmkqIvtfOV0w==}
+ /@vuepress/cli@2.0.0-rc.9:
+ resolution: {integrity: sha512-uv7Xmv3QmPpzCaUAq0oKEwp2tY64AO+7mxamgr7tr+t6FEnCYqr+X0nLlH17UtMkmGWIsbHLIlMjteprxGxIMg==}
hasBin: true
dependencies:
- '@vuepress/core': 2.0.0-rc.0
- '@vuepress/shared': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
+ '@vuepress/core': 2.0.0-rc.9
+ '@vuepress/shared': 2.0.0-rc.9
+ '@vuepress/utils': 2.0.0-rc.9
cac: 6.7.14
- chokidar: 3.5.3
- envinfo: 7.11.0
- esbuild: 0.19.9
+ chokidar: 3.6.0
+ envinfo: 7.11.1
+ esbuild: 0.20.2
transitivePeerDependencies:
- - '@vue/composition-api'
- supports-color
- typescript
dev: true
- /@vuepress/client@2.0.0-rc.0:
- resolution: {integrity: sha512-TwQx8hJgYONYxX+QltZ2aw9O5Ym6SKelfiUduuIRb555B1gece/jSVap3H/ZwyBhpgJMtG4+/Mrmf8nlDSHjvw==}
+ /@vuepress/client@2.0.0-rc.9:
+ resolution: {integrity: sha512-V5jA6L1nHQ8tXBshRHBJKei7HPFonGxFzmVK5yjj2Ho/Xtp/SD9rBS6dyYd5CSkKRGQDgy19Z+BUUPXtdI1qzg==}
dependencies:
- '@vue/devtools-api': 6.5.1
- '@vuepress/shared': 2.0.0-rc.0
- '@vueuse/core': 10.7.0(vue@3.3.11)
- vue: 3.3.11
- vue-router: 4.2.5(vue@3.3.11)
+ '@vue/devtools-api': 6.6.1
+ '@vuepress/shared': 2.0.0-rc.9
+ vue: 3.4.21
+ vue-router: 4.3.0(vue@3.4.21)
transitivePeerDependencies:
- - '@vue/composition-api'
- typescript
dev: true
- /@vuepress/core@2.0.0-rc.0:
- resolution: {integrity: sha512-uoOaZP1MdxZYJIAJcRcmYKKeCIVnxZeOuLMOOB9CPuAKSalT1RvJ1lztw6RX3q9SPnlqtSZPQXDncPAZivw4pA==}
+ /@vuepress/core@2.0.0-rc.9:
+ resolution: {integrity: sha512-uvMkIqYJ7vjfYEC91rMmT8YJt8xXnob5YYY3TzlwWUSEv4yoV3nlVu0l6Zfhenx/7FwKaxRJ/ePlUGIgUHBcBw==}
dependencies:
- '@vuepress/client': 2.0.0-rc.0
- '@vuepress/markdown': 2.0.0-rc.0
- '@vuepress/shared': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
- vue: 3.3.11
+ '@vuepress/client': 2.0.0-rc.9
+ '@vuepress/markdown': 2.0.0-rc.9
+ '@vuepress/shared': 2.0.0-rc.9
+ '@vuepress/utils': 2.0.0-rc.9
+ vue: 3.4.21
transitivePeerDependencies:
- - '@vue/composition-api'
- supports-color
- typescript
dev: true
- /@vuepress/markdown@2.0.0-rc.0:
- resolution: {integrity: sha512-USmqdKKMT6ZFHYRztTjKUlO8qgGfnEygMAAq4AzC/uYXiEfrbMBLAWJhteyGS56P3rGLj0OPAhksE681bX/wOg==}
+ /@vuepress/helper@2.0.0-rc.21(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-yKIG8hwsrA63uWo9hx9u7KBc0HvotKe2/0wVZtUdvdsibG3UYNI9enYQNa8MdqbxF92mmlFkPZdosGjUTA+BYw==}
+ peerDependencies:
+ vuepress: 2.0.0-rc.9
+ dependencies:
+ '@vue/shared': 3.4.21
+ cheerio: 1.0.0-rc.12
+ fflate: 0.8.2
+ gray-matter: 4.0.3
+ vue: 3.4.21
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
+ transitivePeerDependencies:
+ - typescript
+ dev: true
+
+ /@vuepress/markdown@2.0.0-rc.9:
+ resolution: {integrity: sha512-e7as2ar3RQp0bUyMiwBPi7L/G2fzscb3s0BywNcAwubFR22o0/dBEYRYdrN0clPQ2FXpPxF6AFj4aD7O1heCbw==}
dependencies:
- '@mdit-vue/plugin-component': 1.0.0
- '@mdit-vue/plugin-frontmatter': 1.0.0
- '@mdit-vue/plugin-headers': 1.0.0
- '@mdit-vue/plugin-sfc': 1.0.0
- '@mdit-vue/plugin-title': 1.0.0
- '@mdit-vue/plugin-toc': 1.0.0
- '@mdit-vue/shared': 1.0.0
- '@mdit-vue/types': 1.0.0
+ '@mdit-vue/plugin-component': 2.0.0
+ '@mdit-vue/plugin-frontmatter': 2.0.0
+ '@mdit-vue/plugin-headers': 2.0.0
+ '@mdit-vue/plugin-sfc': 2.0.0
+ '@mdit-vue/plugin-title': 2.0.0
+ '@mdit-vue/plugin-toc': 2.0.0
+ '@mdit-vue/shared': 2.0.0
+ '@mdit-vue/types': 2.0.0
'@types/markdown-it': 13.0.7
'@types/markdown-it-emoji': 2.0.4
- '@vuepress/shared': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
- markdown-it: 13.0.2
- markdown-it-anchor: 8.6.7(@types/markdown-it@13.0.7)(markdown-it@13.0.2)
- markdown-it-emoji: 2.0.2
- mdurl: 1.0.1
+ '@vuepress/shared': 2.0.0-rc.9
+ '@vuepress/utils': 2.0.0-rc.9
+ markdown-it: 14.1.0
+ markdown-it-anchor: 8.6.7(@types/markdown-it@13.0.7)(markdown-it@14.1.0)
+ markdown-it-emoji: 3.0.0
+ mdurl: 2.0.0
transitivePeerDependencies:
- supports-color
dev: true
- /@vuepress/plugin-active-header-links@2.0.0-rc.0:
- resolution: {integrity: sha512-UJdXLYNGL5Wjy5YGY8M2QgqT75bZ95EHebbqGi8twBdIJE9O+bM+dPJyYtAk2PIVqFORiw3Hj+PchsNSxdn9+g==}
+ /@vuepress/plugin-active-header-links@2.0.0-rc.21(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-6i9TfGDV1zfszQ5aw6bV+/UvPdBWt3VxN2WB4Dg5o1g8Qn4z5CI6AW6VfLKRyaKUD+Rzj6W+Ikgx4xnF5RZAdA==}
+ peerDependencies:
+ vuepress: 2.0.0-rc.9
dependencies:
- '@vuepress/client': 2.0.0-rc.0
- '@vuepress/core': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
- ts-debounce: 4.0.0
- vue: 3.3.11
- vue-router: 4.2.5(vue@3.3.11)
+ '@vueuse/core': 10.9.0(vue@3.4.21)
+ vue: 3.4.21
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
transitivePeerDependencies:
- '@vue/composition-api'
- - supports-color
- typescript
dev: true
- /@vuepress/plugin-back-to-top@2.0.0-rc.0:
- resolution: {integrity: sha512-6GPfuzV5lkAnR00BxRUhqMXwMWt741alkq2R6bln4N8BneSOwEpX/7vi19MGf232aKdS/Va4pF5p0/nJ8Sed/g==}
+ /@vuepress/plugin-back-to-top@2.0.0-rc.21(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-qEWu0BFvadJZRH1r1UQW4mHvBYwHGSJtwNv14C/Qmxuvv2UQnpl8T2qbvPAntUWMdy94wVhr2YWCfyLh7TSEOA==}
+ peerDependencies:
+ vuepress: 2.0.0-rc.9
dependencies:
- '@vuepress/client': 2.0.0-rc.0
- '@vuepress/core': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
- ts-debounce: 4.0.0
- vue: 3.3.11
+ '@vuepress/helper': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vueuse/core': 10.9.0(vue@3.4.21)
+ vue: 3.4.21
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
transitivePeerDependencies:
- '@vue/composition-api'
- - supports-color
- typescript
dev: true
- /@vuepress/plugin-container@2.0.0-rc.0:
- resolution: {integrity: sha512-b7vrLN11YE7qiUDPfA3N9P7Z8fupe9Wbcr9KAE/bmfZ9VT4d6kzpVyoU7XHi99XngitsmnkaXP4aBvBF1c2AnA==}
+ /@vuepress/plugin-container@2.0.0-rc.21(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-NEIjs+KmVeB70Do9eB66fIe947Qr+mY8TIpSBYWvTDMaAEYSJlIlQHdaXD6b/lJ/TuTDB4F4BzZmemJZEpAchw==}
+ peerDependencies:
+ vuepress: 2.0.0-rc.9
dependencies:
'@types/markdown-it': 13.0.7
- '@vuepress/core': 2.0.0-rc.0
- '@vuepress/markdown': 2.0.0-rc.0
- '@vuepress/shared': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
- markdown-it: 13.0.2
- markdown-it-container: 3.0.0
+ markdown-it: 14.1.0
+ markdown-it-container: 4.0.0
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
+ dev: true
+
+ /@vuepress/plugin-copy-code@2.0.0-rc.21(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-280jsPmI/YeKBnXt/MCw/nrv9pUou+zhHK5mOU3ecVYfY7Pu2Xi1zdZ2kK0Ri02Txm5AwLb5YWeSac349JuUUA==}
+ peerDependencies:
+ vuepress: 2.0.0-rc.9
+ dependencies:
+ '@vuepress/helper': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vueuse/core': 10.9.0(vue@3.4.21)
+ vue: 3.4.21
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
transitivePeerDependencies:
- '@vue/composition-api'
- - supports-color
- typescript
dev: true
- /@vuepress/plugin-docsearch@2.0.0-rc.0(@algolia/client-search@4.21.1)(search-insights@2.13.0):
- resolution: {integrity: sha512-bFbb+RxNyoLVbojv3Fh3UNfMmx9tszdae5ni9nG2xa05giCRwGKT0wFG3Q6n0a9kIQ6V7z3PjCj9x1k4SALPEA==}
+ /@vuepress/plugin-docsearch@2.0.0-rc.21(@algolia/client-search@4.23.0)(search-insights@2.13.0)(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-ekbtJyqcnd+J00rb905B8/9KTa4isA+MIsy/r2N+a8Fn0v/GMnGVSseqJs8q74YmHlIx4GawDfKy7F84zxMfjA==}
+ peerDependencies:
+ vuepress: 2.0.0-rc.9
dependencies:
- '@docsearch/css': 3.5.2
- '@docsearch/js': 3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0)
- '@docsearch/react': 3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0)
- '@vuepress/client': 2.0.0-rc.0
- '@vuepress/core': 2.0.0-rc.0
- '@vuepress/shared': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
- '@vueuse/core': 10.7.0(vue@3.3.11)
+ '@docsearch/css': 3.6.0
+ '@docsearch/js': 3.6.0(@algolia/client-search@4.23.0)(search-insights@2.13.0)
+ '@docsearch/react': 3.6.0(@algolia/client-search@4.23.0)(search-insights@2.13.0)
+ '@vueuse/core': 10.9.0(vue@3.4.21)
ts-debounce: 4.0.0
- vue: 3.3.11
- vue-router: 4.2.5(vue@3.3.11)
+ vue: 3.4.21
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
transitivePeerDependencies:
- '@algolia/client-search'
- '@types/react'
@@ -957,276 +1108,542 @@ packages:
- react
- react-dom
- search-insights
- - supports-color
- typescript
dev: true
- /@vuepress/plugin-external-link-icon@2.0.0-rc.0:
- resolution: {integrity: sha512-o8bk0oIlj/BkKc02mq91XLDloq1VOz/8iNcRwKAeqBE6svXzdYiyoTGet0J/4iPuAetsCn75S57W6RioDJHMnQ==}
+ /@vuepress/plugin-external-link-icon@2.0.0-rc.21(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-Wt7hjWpoUccJHj5KHK24Uks+6oWug6y5cw9QzWlNgiCyg+hvII7I+FdORRvibPUG2ndymi6ZOFyJZcR072kbKA==}
+ peerDependencies:
+ vuepress: 2.0.0-rc.9
dependencies:
- '@vuepress/client': 2.0.0-rc.0
- '@vuepress/core': 2.0.0-rc.0
- '@vuepress/markdown': 2.0.0-rc.0
- '@vuepress/shared': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
- vue: 3.3.11
+ vue: 3.4.21
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
transitivePeerDependencies:
- - '@vue/composition-api'
- - supports-color
- typescript
dev: true
- /@vuepress/plugin-git@2.0.0-rc.0:
- resolution: {integrity: sha512-r7UF77vZxaYeJQLygzodKv+15z3/dTLuGp4VcYO21W6BlJZvd4u9zqgiV7A//bZQvK4+3Hprylr0G3KgXqMewA==}
+ /@vuepress/plugin-git@2.0.0-rc.21(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-Xgrqv86bjrBPFrJr69b1KQlDUhAGhWfBRIGM3GQOI98mOi2VKCX9P4xyWK/lIpn8eVB3s0lY1KewhkXgy7UITg==}
+ peerDependencies:
+ vuepress: 2.0.0-rc.9
dependencies:
- '@vuepress/core': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
execa: 8.0.1
- transitivePeerDependencies:
- - '@vue/composition-api'
- - supports-color
- - typescript
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
dev: true
- /@vuepress/plugin-google-analytics@2.0.0-rc.15(vuepress@2.0.0-rc.0):
- resolution: {integrity: sha512-ovMpOYz0fFoVcRVgyv+7qnU7LPnovocbtYPk+oPspd9hMedYXjAMeyxOYYnN/MiC6+DSKshDqStTfjVpW3x9DQ==}
+ /@vuepress/plugin-google-analytics@2.0.0-rc.21(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-ZqDw3MrxA3tY5WXD/LIaZoCSgnynO9aQboOUgm1SF8GoR/7ULqiCWmxbLD3L8kkWS3TWKnH+JwK0VG0J6FNyFA==}
peerDependencies:
- vuepress: 2.0.0-rc.8
+ vuepress: 2.0.0-rc.9
dependencies:
- vuepress: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11)
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
dev: true
- /@vuepress/plugin-medium-zoom@2.0.0-rc.0:
- resolution: {integrity: sha512-peU1lYKsmKikIe/0pkJuHzD/k6xW2TuqdvKVhV4I//aOE1WxsREKJ4ACcldmoIsnysoDydAUqKT6xDPGyDsH2g==}
+ /@vuepress/plugin-links-check@2.0.0-rc.21(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-3dIXKJILTDP7RoPVmhtq/RfytZqX1sCA9Bf++DlgQV6jp2ctcTf4F9I5J/2wQce8yuLogO8fHnWhEgO2rgQXLw==}
+ peerDependencies:
+ vuepress: 2.0.0-rc.9
dependencies:
- '@vuepress/client': 2.0.0-rc.0
- '@vuepress/core': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
- medium-zoom: 1.1.0
- vue: 3.3.11
+ '@vuepress/helper': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
transitivePeerDependencies:
- - '@vue/composition-api'
- - supports-color
- typescript
dev: true
- /@vuepress/plugin-nprogress@2.0.0-rc.0:
- resolution: {integrity: sha512-rI+eK0Pg1KiZE+7hGmDUeSbgdWCid8Vnw0hFKNmjinDzGVmx4m03M6qfvclsI0SryH+lR7itZGLaR4gbTlrz/w==}
+ /@vuepress/plugin-medium-zoom@2.0.0-rc.21(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-UZmh5vm/X/czJoVwXxTc+p9lRiAjI/7DrBvj9V7m4DNecPjsMtH/X9oU0Dqn+PeKeTHAEK+EVwy4PUtWc17hIQ==}
+ peerDependencies:
+ vuepress: 2.0.0-rc.9
dependencies:
- '@vuepress/client': 2.0.0-rc.0
- '@vuepress/core': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
- vue: 3.3.11
- vue-router: 4.2.5(vue@3.3.11)
+ '@vuepress/helper': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ medium-zoom: 1.1.0
+ vue: 3.4.21
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
transitivePeerDependencies:
- - '@vue/composition-api'
- - supports-color
- typescript
dev: true
- /@vuepress/plugin-palette@2.0.0-rc.0:
- resolution: {integrity: sha512-wW70SCp3/K7s1lln5YQsBGTog2WXaQv5piva5zhXcQ47YGf4aAJpThDa5C/ot4HhkPOKn8Iz5s0ckxXZzW8DIg==}
+ /@vuepress/plugin-nprogress@2.0.0-rc.21(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-qpGA76195SyfpuQC1Pb9LwgCYIp/zg+BBDnexukJMdLjP1KnaU7HLhS5NnRNIWv8E+IC61zLvlh/wRox17QE+w==}
+ peerDependencies:
+ vuepress: 2.0.0-rc.9
dependencies:
- '@vuepress/core': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
- chokidar: 3.5.3
+ vue: 3.4.21
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
transitivePeerDependencies:
- - '@vue/composition-api'
- - supports-color
- typescript
dev: true
- /@vuepress/plugin-prismjs@2.0.0-rc.0:
- resolution: {integrity: sha512-c5WRI7+FhVjdbymOKQ8F2KY/Bnv7aQtWScVk8vCMUimNi7v7Wff/A/i3KSFNz/tge3LxiAeH/Dc2WS/OnQXwCg==}
+ /@vuepress/plugin-palette@2.0.0-rc.21(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-jnWzTiM3xHXweD3AKZVTCnuliH/aoIGaV1C5yhIeinXPZHn49syH8wMQ3kAgxWO+Y4xfihiY8E32V33XQ8Lf6w==}
+ peerDependencies:
+ vuepress: 2.0.0-rc.9
+ dependencies:
+ chokidar: 3.6.0
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
+ dev: true
+
+ /@vuepress/plugin-prismjs@2.0.0-rc.21(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-dMTCu/TZ1QCmTHXL4THVeh9gWzuqkJV8qhck5U77OP1qmgyf+r529A+MTOgp3ddcph1Yzb/FRb2orlefHk+yNQ==}
+ peerDependencies:
+ vuepress: 2.0.0-rc.9
dependencies:
- '@vuepress/core': 2.0.0-rc.0
prismjs: 1.29.0
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
+ dev: true
+
+ /@vuepress/plugin-seo@2.0.0-rc.21(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-q8pXhXn5OL0QG6KN9rjyXngj2km5eRDK0VL8ShLrTD9fAwvjhujhjHpI/DRHg6ScWlMDKY7ncEOmslDCBuKLtg==}
+ peerDependencies:
+ vuepress: 2.0.0-rc.9
+ dependencies:
+ '@vuepress/helper': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
transitivePeerDependencies:
- - '@vue/composition-api'
- - supports-color
- typescript
dev: true
- /@vuepress/plugin-theme-data@2.0.0-rc.0:
- resolution: {integrity: sha512-FXY3/Ml+rM6gNKvwdBF6vKAcwnSvtXCzKgQwJAw3ppQTKUkLcbOxqM+h4d8bzHWAAvdnEvQFug5uEZgWllBQbA==}
+ /@vuepress/plugin-sitemap@2.0.0-rc.21(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-YbotKptHfifjwmXhj4kX6iA8tCGp7gTZAHm9YiPDr/8dYzBkkQ4oC84JCifkZYt3fWkVqq/Qa0vpJfnKPGOidg==}
+ peerDependencies:
+ vuepress: 2.0.0-rc.9
dependencies:
- '@vue/devtools-api': 6.5.1
- '@vuepress/client': 2.0.0-rc.0
- '@vuepress/core': 2.0.0-rc.0
- '@vuepress/shared': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
- vue: 3.3.11
+ '@vuepress/helper': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ sitemap: 7.1.1
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
+ transitivePeerDependencies:
+ - typescript
+ dev: true
+
+ /@vuepress/plugin-theme-data@2.0.0-rc.21(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-vLXvTKx4gWXY6oVaJ9Z2ECnojnKQuXBIe1ZGIAwJdxCYfr6aaqggrVvmphB8BwTURh0XAuis/l6YTcMrs0bX8Q==}
+ peerDependencies:
+ vuepress: 2.0.0-rc.9
+ dependencies:
+ '@vue/devtools-api': 6.6.1
+ vue: 3.4.21
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
transitivePeerDependencies:
- - '@vue/composition-api'
- - supports-color
- typescript
dev: true
- /@vuepress/shared@2.0.0-rc.0:
- resolution: {integrity: sha512-ikdSfjRv5LGM1iv4HHwF9P6gqTjaFCXKPK+hzlkHFHNZO1GLqk7/BPc4F51tAG1s8TcLhUZc+54LrfgS7PkXXA==}
+ /@vuepress/shared@2.0.0-rc.9:
+ resolution: {integrity: sha512-XfI6CWNv4/Vp9Iew6GJil9RUSy1rM7zGdjwikr0j3Rkh55q3f00w1wud47wE9kxRqsZ0PIvsMget5CxEn5rA/w==}
dependencies:
- '@mdit-vue/types': 1.0.0
- '@vue/shared': 3.3.11
+ '@mdit-vue/types': 2.0.0
dev: true
- /@vuepress/theme-default@2.0.0-rc.0:
- resolution: {integrity: sha512-I8Y08evDmMuD1jh3NftPpFFSlCWOizQDJLjN7EQwcg7jiAP4A7c2REo6nBN2EmP24Mi7UrRM+RnytHR5V+pElA==}
+ /@vuepress/theme-default@2.0.0-rc.21(sass-loader@14.1.1)(vuepress@2.0.0-rc.9):
+ resolution: {integrity: sha512-TXwFbfcMQvNMOpt4R39i9vuVWhB3TdM9VLB6aWDjwxEMTMK4Qpwc2VXX7drA7YcjjGkPpW1h7/l7xlEbvRcnsQ==}
peerDependencies:
- sass-loader: ^13.3.2
+ sass-loader: ^14.0.0
+ vuepress: 2.0.0-rc.9
peerDependenciesMeta:
sass-loader:
optional: true
dependencies:
- '@vuepress/client': 2.0.0-rc.0
- '@vuepress/core': 2.0.0-rc.0
- '@vuepress/plugin-active-header-links': 2.0.0-rc.0
- '@vuepress/plugin-back-to-top': 2.0.0-rc.0
- '@vuepress/plugin-container': 2.0.0-rc.0
- '@vuepress/plugin-external-link-icon': 2.0.0-rc.0
- '@vuepress/plugin-git': 2.0.0-rc.0
- '@vuepress/plugin-medium-zoom': 2.0.0-rc.0
- '@vuepress/plugin-nprogress': 2.0.0-rc.0
- '@vuepress/plugin-palette': 2.0.0-rc.0
- '@vuepress/plugin-prismjs': 2.0.0-rc.0
- '@vuepress/plugin-theme-data': 2.0.0-rc.0
- '@vuepress/shared': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
- '@vueuse/core': 10.7.0(vue@3.3.11)
- sass: 1.69.5
- vue: 3.3.11
- vue-router: 4.2.5(vue@3.3.11)
+ '@vuepress/helper': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vuepress/plugin-active-header-links': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vuepress/plugin-back-to-top': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vuepress/plugin-container': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vuepress/plugin-copy-code': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vuepress/plugin-external-link-icon': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vuepress/plugin-git': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vuepress/plugin-links-check': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vuepress/plugin-medium-zoom': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vuepress/plugin-nprogress': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vuepress/plugin-palette': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vuepress/plugin-prismjs': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vuepress/plugin-seo': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vuepress/plugin-sitemap': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vuepress/plugin-theme-data': 2.0.0-rc.21(vuepress@2.0.0-rc.9)
+ '@vueuse/core': 10.9.0(vue@3.4.21)
+ sass: 1.72.0
+ sass-loader: 14.1.1(webpack@5.91.0)
+ vue: 3.4.21
+ vuepress: 2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21)
transitivePeerDependencies:
- '@vue/composition-api'
- - supports-color
- typescript
dev: true
 
- /@vuepress/utils@2.0.0-rc.0:
- resolution: {integrity: sha512-Q1ay/woClDHcW0Qe91KsnHoupdNN0tp/vhjvVLuAYxlv/1Obii7hz9WFcajyyGEhmsYxdvG2sGmcxFA02tuKkw==}
+ /@vuepress/utils@2.0.0-rc.9:
+ resolution: {integrity: sha512-qk6Pel4JVKYKxp3bWxyvnwchvx3QaCWc7SqUw7L6qUo/um+0U2U45L0anWoAfckw12RXYhoIEbJ9UZpueiKOPg==}
dependencies:
'@types/debug': 4.1.12
'@types/fs-extra': 11.0.4
'@types/hash-sum': 1.0.2
- '@vuepress/shared': 2.0.0-rc.0
+ '@vuepress/shared': 2.0.0-rc.9
debug: 4.3.4
fs-extra: 11.2.0
- globby: 14.0.0
+ globby: 14.0.1
hash-sum: 2.0.0
- ora: 7.0.1
+ ora: 8.0.1
picocolors: 1.0.0
upath: 2.0.1
transitivePeerDependencies:
- supports-color
dev: true
 
- /@vueuse/core@10.7.0(vue@3.3.11):
- resolution: {integrity: sha512-4EUDESCHtwu44ZWK3Gc/hZUVhVo/ysvdtwocB5vcauSV4B7NiGY5972WnsojB3vRNdxvAt7kzJWE2h9h7C9d5w==}
+ /@vueuse/core@10.9.0(vue@3.4.21):
+ resolution: {integrity: sha512-/1vjTol8SXnx6xewDEKfS0Ra//ncg4Hb0DaZiwKf7drgfMsKFExQ+FnnENcN6efPen+1kIzhLQoGSy0eDUVOMg==}
dependencies:
'@types/web-bluetooth': 0.0.20
- '@vueuse/metadata': 10.7.0
- '@vueuse/shared': 10.7.0(vue@3.3.11)
- vue-demi: 0.14.6(vue@3.3.11)
+ '@vueuse/metadata': 10.9.0
+ '@vueuse/shared': 10.9.0(vue@3.4.21)
+ vue-demi: 0.14.7(vue@3.4.21)
transitivePeerDependencies:
- '@vue/composition-api'
- vue
dev: true
 
- /@vueuse/metadata@10.7.0:
- resolution: {integrity: sha512-GlaH7tKP2iBCZ3bHNZ6b0cl9g0CJK8lttkBNUX156gWvNYhTKEtbweWLm9rxCPIiwzYcr/5xML6T8ZUEt+DkvA==}
+ /@vueuse/metadata@10.9.0:
+ resolution: {integrity: sha512-iddNbg3yZM0X7qFY2sAotomgdHK7YJ6sKUvQqbvwnf7TmaVPxS4EJydcNsVejNdS8iWCtDk+fYXr7E32nyTnGA==}
dev: true
 
- /@vueuse/shared@10.7.0(vue@3.3.11):
- resolution: {integrity: sha512-kc00uV6CiaTdc3i1CDC4a3lBxzaBE9AgYNtFN87B5OOscqeWElj/uza8qVDmk7/U8JbqoONLbtqiLJ5LGRuqlw==}
+ /@vueuse/shared@10.9.0(vue@3.4.21):
+ resolution: {integrity: sha512-Uud2IWncmAfJvRaFYzv5OHDli+FbOzxiVEQdLCKQKLyhz94PIyFC3CHcH7EDMwIn8NPtD06+PNbC/PiO0LGLtw==}
dependencies:
- vue-demi: 0.14.6(vue@3.3.11)
+ vue-demi: 0.14.7(vue@3.4.21)
transitivePeerDependencies:
- '@vue/composition-api'
- vue
dev: true
 
- /algoliasearch@4.21.1:
- resolution: {integrity: sha512-Ym0MGwOcjQhZ+s1N/j0o94g3vQD0MzNpWsfJLyPVCt0zHflbi0DwYX+9GPmTJ4BzegoxWMyCPgcmpd3R+VlOzQ==}
+ /@webassemblyjs/ast@1.12.1:
+ resolution: {integrity: sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg==}
dependencies:
- '@algolia/cache-browser-local-storage': 4.21.1
- '@algolia/cache-common': 4.21.1
- '@algolia/cache-in-memory': 4.21.1
- '@algolia/client-account': 4.21.1
- '@algolia/client-analytics': 4.21.1
- '@algolia/client-common': 4.21.1
- '@algolia/client-personalization': 4.21.1
- '@algolia/client-search': 4.21.1
- '@algolia/logger-common': 4.21.1
- '@algolia/logger-console': 4.21.1
- '@algolia/requester-browser-xhr': 4.21.1
- '@algolia/requester-common': 4.21.1
- '@algolia/requester-node-http': 4.21.1
- '@algolia/transporter': 4.21.1
+ '@webassemblyjs/helper-numbers': 1.11.6
+ '@webassemblyjs/helper-wasm-bytecode': 1.11.6
dev: true
 
- /ansi-regex@6.0.1:
- resolution: {integrity: sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==}
- engines: {node: '>=12'}
+ /@webassemblyjs/floating-point-hex-parser@1.11.6:
+ resolution: {integrity: sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw==}
dev: true
 
- /anymatch@3.1.3:
- resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}
- engines: {node: '>= 8'}
- dependencies:
- normalize-path: 3.0.0
- picomatch: 2.3.1
+ /@webassemblyjs/helper-api-error@1.11.6:
+ resolution: {integrity: sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q==}
dev: true
 
- /arg@5.0.2:
- resolution: {integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==}
+ /@webassemblyjs/helper-buffer@1.12.1:
+ resolution: {integrity: sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw==}
dev: true
 
- /argparse@1.0.10:
- resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==}
+ /@webassemblyjs/helper-numbers@1.11.6:
+ resolution: {integrity: sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g==}
dependencies:
- sprintf-js: 1.0.3
+ '@webassemblyjs/floating-point-hex-parser': 1.11.6
+ '@webassemblyjs/helper-api-error': 1.11.6
+ '@xtuc/long': 4.2.2
dev: true
 
- /argparse@2.0.1:
- resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==}
+ /@webassemblyjs/helper-wasm-bytecode@1.11.6:
+ resolution: {integrity: sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA==}
dev: true
 
- /autoprefixer@10.4.16(postcss@8.4.32):
- resolution: {integrity: sha512-7vd3UC6xKp0HLfua5IjZlcXvGAGy7cBAXTg2lyQ/8WpNhd6SiZ8Be+xm3FyBSYJx5GKcpRCzBh7RH4/0dnY+uQ==}
- engines: {node: ^10 || ^12 || >=14}
- hasBin: true
- peerDependencies:
- postcss: ^8.1.0
+ /@webassemblyjs/helper-wasm-section@1.12.1:
+ resolution: {integrity: sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g==}
dependencies:
- browserslist: 4.22.2
- caniuse-lite: 1.0.30001568
- fraction.js: 4.3.7
- normalize-range: 0.1.2
- picocolors: 1.0.0
- postcss: 8.4.32
- postcss-value-parser: 4.2.0
+ '@webassemblyjs/ast': 1.12.1
+ '@webassemblyjs/helper-buffer': 1.12.1
+ '@webassemblyjs/helper-wasm-bytecode': 1.11.6
+ '@webassemblyjs/wasm-gen': 1.12.1
dev: true
 
- /base64-js@1.5.1:
- resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
+ /@webassemblyjs/ieee754@1.11.6:
+ resolution: {integrity: sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg==}
+ dependencies:
+ '@xtuc/ieee754': 1.2.0
dev: true
 
- /binary-extensions@2.2.0:
- resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==}
- engines: {node: '>=8'}
+ /@webassemblyjs/leb128@1.11.6:
+ resolution: {integrity: sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ==}
+ dependencies:
+ '@xtuc/long': 4.2.2
+ dev: true
+
+ /@webassemblyjs/utf8@1.11.6:
+ resolution: {integrity: sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA==}
dev: true
 
- /bl@5.1.0:
- resolution: {integrity: sha512-tv1ZJHLfTDnXE6tMHv73YgSJaWR2AFuPwMntBe7XL/GBFHnT0CLnsHMogfk5+GzCDC5ZWarSCYaIGATZt9dNsQ==}
+ /@webassemblyjs/wasm-edit@1.12.1:
+ resolution: {integrity: sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g==}
dependencies:
- buffer: 6.0.3
- inherits: 2.0.4
- readable-stream: 3.6.2
+ '@webassemblyjs/ast': 1.12.1
+ '@webassemblyjs/helper-buffer': 1.12.1
+ '@webassemblyjs/helper-wasm-bytecode': 1.11.6
+ '@webassemblyjs/helper-wasm-section': 1.12.1
+ '@webassemblyjs/wasm-gen': 1.12.1
+ '@webassemblyjs/wasm-opt': 1.12.1
+ '@webassemblyjs/wasm-parser': 1.12.1
+ '@webassemblyjs/wast-printer': 1.12.1
+ dev: true
+
+ /@webassemblyjs/wasm-gen@1.12.1:
+ resolution: {integrity: sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w==}
+ dependencies:
+ '@webassemblyjs/ast': 1.12.1
+ '@webassemblyjs/helper-wasm-bytecode': 1.11.6
+ '@webassemblyjs/ieee754': 1.11.6
+ '@webassemblyjs/leb128': 1.11.6
+ '@webassemblyjs/utf8': 1.11.6
+ dev: true
+
+ /@webassemblyjs/wasm-opt@1.12.1:
+ resolution: {integrity: sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg==}
+ dependencies:
+ '@webassemblyjs/ast': 1.12.1
+ '@webassemblyjs/helper-buffer': 1.12.1
+ '@webassemblyjs/wasm-gen': 1.12.1
+ '@webassemblyjs/wasm-parser': 1.12.1
+ dev: true
+
+ /@webassemblyjs/wasm-parser@1.12.1:
+ resolution: {integrity: sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ==}
+ dependencies:
+ '@webassemblyjs/ast': 1.12.1
+ '@webassemblyjs/helper-api-error': 1.11.6
+ '@webassemblyjs/helper-wasm-bytecode': 1.11.6
+ '@webassemblyjs/ieee754': 1.11.6
+ '@webassemblyjs/leb128': 1.11.6
+ '@webassemblyjs/utf8': 1.11.6
+ dev: true
+
+ /@webassemblyjs/wast-printer@1.12.1:
+ resolution: {integrity: sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA==}
+ dependencies:
+ '@webassemblyjs/ast': 1.12.1
+ '@xtuc/long': 4.2.2
+ dev: true
+
+ /@xtuc/ieee754@1.2.0:
+ resolution: {integrity: sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==}
+ dev: true
+
+ /@xtuc/long@4.2.2:
+ resolution: {integrity: sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==}
+ dev: true
+
+ /accepts@1.3.8:
+ resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==}
+ engines: {node: '>= 0.6'}
+ dependencies:
+ mime-types: 2.1.35
+ negotiator: 0.6.3
+ dev: true
+
+ /acorn-import-assertions@1.9.0(acorn@8.11.3):
+ resolution: {integrity: sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA==}
+ peerDependencies:
+ acorn: ^8
+ dependencies:
+ acorn: 8.11.3
+ dev: true
+
+ /acorn@8.11.3:
+ resolution: {integrity: sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==}
+ engines: {node: '>=0.4.0'}
+ hasBin: true
+ dev: true
+
+ /ajv-formats@2.1.1(ajv@8.12.0):
+ resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==}
+ peerDependencies:
+ ajv: ^8.0.0
+ peerDependenciesMeta:
+ ajv:
+ optional: true
+ dependencies:
+ ajv: 8.12.0
+ dev: true
+
+ /ajv-keywords@3.5.2(ajv@6.12.6):
+ resolution: {integrity: sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==}
+ peerDependencies:
+ ajv: ^6.9.1
+ dependencies:
+ ajv: 6.12.6
+ dev: true
+
+ /ajv-keywords@5.1.0(ajv@8.12.0):
+ resolution: {integrity: sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==}
+ peerDependencies:
+ ajv: ^8.8.2
+ dependencies:
+ ajv: 8.12.0
+ fast-deep-equal: 3.1.3
+ dev: true
+
+ /ajv@6.12.6:
+ resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==}
+ dependencies:
+ fast-deep-equal: 3.1.3
+ fast-json-stable-stringify: 2.1.0
+ json-schema-traverse: 0.4.1
+ uri-js: 4.4.1
+ dev: true
+
+ /ajv@8.12.0:
+ resolution: {integrity: sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==}
+ dependencies:
+ fast-deep-equal: 3.1.3
+ json-schema-traverse: 1.0.0
+ require-from-string: 2.0.2
+ uri-js: 4.4.1
+ dev: true
+
+ /algoliasearch@4.23.0:
+ resolution: {integrity: sha512-JuM1+MaKovyosHYsLOdfhI7ytOES4qM/U7XAlU53lfGpiQ7+Ct1Wd3A7eP4OWIxxlN+bb42XIX5qZChMCC7rCA==}
+ deprecated: This version contains an issue that will be resolved in 4.23.1
+ dependencies:
+ '@algolia/cache-browser-local-storage': 4.23.0
+ '@algolia/cache-common': 4.23.0
+ '@algolia/cache-in-memory': 4.23.0
+ '@algolia/client-account': 4.23.0
+ '@algolia/client-analytics': 4.23.0
+ '@algolia/client-common': 4.23.0
+ '@algolia/client-personalization': 4.23.0
+ '@algolia/client-search': 4.23.0
+ '@algolia/logger-common': 4.23.0
+ '@algolia/logger-console': 4.23.0
+ '@algolia/recommend': 4.23.0
+ '@algolia/requester-browser-xhr': 4.23.0
+ '@algolia/requester-common': 4.23.0
+ '@algolia/requester-node-http': 4.23.0
+ '@algolia/transporter': 4.23.0
+ dev: true
+
+ /ansi-html-community@0.0.8:
+ resolution: {integrity: sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==}
+ engines: {'0': node >= 0.8.0}
+ hasBin: true
+ dev: true
+
+ /ansi-regex@5.0.1:
+ resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /ansi-regex@6.0.1:
+ resolution: {integrity: sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==}
+ engines: {node: '>=12'}
+ dev: true
+
+ /ansi-styles@3.2.1:
+ resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==}
+ engines: {node: '>=4'}
+ dependencies:
+ color-convert: 1.9.3
+ dev: true
+
+ /ansi-styles@4.3.0:
+ resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==}
+ engines: {node: '>=8'}
+ dependencies:
+ color-convert: 2.0.1
+ dev: true
+
+ /ansi-styles@6.2.1:
+ resolution: {integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==}
+ engines: {node: '>=12'}
+ dev: true
+
+ /anymatch@3.1.3:
+ resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}
+ engines: {node: '>= 8'}
+ dependencies:
+ normalize-path: 3.0.0
+ picomatch: 2.3.1
+ dev: true
+
+ /arg@5.0.2:
+ resolution: {integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==}
+ dev: true
+
+ /argparse@1.0.10:
+ resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==}
+ dependencies:
+ sprintf-js: 1.0.3
+ dev: true
+
+ /argparse@2.0.1:
+ resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==}
+ dev: true
+
+ /array-flatten@1.1.1:
+ resolution: {integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==}
+ dev: true
+
+ /autoprefixer@10.4.19(postcss@8.4.38):
+ resolution: {integrity: sha512-BaENR2+zBZ8xXhM4pUaKUxlVdxZ0EZhjvbopwnXmxRUfqDmwSpC2lAi/QXvx7NRdPCo1WKEcEF6mV64si1z4Ew==}
+ engines: {node: ^10 || ^12 || >=14}
+ hasBin: true
+ peerDependencies:
+ postcss: ^8.1.0
+ dependencies:
+ browserslist: 4.23.0
+ caniuse-lite: 1.0.30001600
+ fraction.js: 4.3.7
+ normalize-range: 0.1.2
+ picocolors: 1.0.0
+ postcss: 8.4.38
+ postcss-value-parser: 4.2.0
+ dev: true
+
+ /balanced-match@1.0.2:
+ resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
+ dev: true
+
+ /batch@0.6.1:
+ resolution: {integrity: sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw==}
+ dev: true
+
+ /big.js@5.2.2:
+ resolution: {integrity: sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==}
+ dev: true
+
+ /binary-extensions@2.3.0:
+ resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /body-parser@1.20.2:
+ resolution: {integrity: sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==}
+ engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16}
+ dependencies:
+ bytes: 3.1.2
+ content-type: 1.0.5
+ debug: 2.6.9
+ depd: 2.0.0
+ destroy: 1.2.0
+ http-errors: 2.0.0
+ iconv-lite: 0.4.24
+ on-finished: 2.4.1
+ qs: 6.11.0
+ raw-body: 2.5.2
+ type-is: 1.6.18
+ unpipe: 1.0.0
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /bonjour-service@1.2.1:
+ resolution: {integrity: sha512-oSzCS2zV14bh2kji6vNe7vrpJYCHGvcZnlffFQ1MEoX/WOeQ/teD8SYWKR942OI3INjq8OMNJlbPK5LLLUxFDw==}
+ dependencies:
+ fast-deep-equal: 3.1.3
+ multicast-dns: 7.2.5
dev: true
 
/boolbase@1.0.0:
resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==}
dev: true
 
+ /brace-expansion@2.0.1:
+ resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==}
+ dependencies:
+ balanced-match: 1.0.2
+ dev: true
+
/braces@3.0.2:
resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==}
engines: {node: '>=8'}
@@ -1234,22 +1651,36 @@ packages:
fill-range: 7.0.1
dev: true
 
- /browserslist@4.22.2:
- resolution: {integrity: sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A==}
+ /browserslist@4.23.0:
+ resolution: {integrity: sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==}
engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7}
hasBin: true
dependencies:
- caniuse-lite: 1.0.30001568
- electron-to-chromium: 1.4.611
+ caniuse-lite: 1.0.30001600
+ electron-to-chromium: 1.4.717
node-releases: 2.0.14
- update-browserslist-db: 1.0.13(browserslist@4.22.2)
+ update-browserslist-db: 1.0.13(browserslist@4.23.0)
dev: true
 
- /buffer@6.0.3:
- resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==}
+ /buffer-from@1.1.2:
+ resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==}
+ dev: true
+
+ /bundle-name@4.1.0:
+ resolution: {integrity: sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==}
+ engines: {node: '>=18'}
dependencies:
- base64-js: 1.5.1
- ieee754: 1.2.1
+ run-applescript: 7.0.0
+ dev: true
+
+ /bytes@3.0.0:
+ resolution: {integrity: sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw==}
+ engines: {node: '>= 0.8'}
+ dev: true
+
+ /bytes@3.1.2:
+ resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==}
+ engines: {node: '>= 0.8'}
dev: true
 
/cac@6.7.14:
@@ -1257,8 +1688,48 @@ packages:
engines: {node: '>=8'}
dev: true
 
- /caniuse-lite@1.0.30001568:
- resolution: {integrity: sha512-vSUkH84HontZJ88MiNrOau1EBrCqEQYgkC5gIySiDlpsm8sGVrhU7Kx4V6h0tnqaHzIHZv08HlJIwPbL4XL9+A==}
+ /call-bind@1.0.7:
+ resolution: {integrity: sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ es-define-property: 1.0.0
+ es-errors: 1.3.0
+ function-bind: 1.1.2
+ get-intrinsic: 1.2.4
+ set-function-length: 1.2.2
+ dev: true
+
+ /callsites@3.1.0:
+ resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
+ engines: {node: '>=6'}
+ dev: true
+
+ /camel-case@4.1.2:
+ resolution: {integrity: sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==}
+ dependencies:
+ pascal-case: 3.1.2
+ tslib: 2.6.2
+ dev: true
+
+ /caniuse-lite@1.0.30001600:
+ resolution: {integrity: sha512-+2S9/2JFhYmYaDpZvo0lKkfvuKIglrx68MwOBqMGHhQsNkLjB5xtc/TGoEPs+MxjSyN/72qer2g97nzR641mOQ==}
+ dev: true
+
+ /chalk@2.4.2:
+ resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==}
+ engines: {node: '>=4'}
+ dependencies:
+ ansi-styles: 3.2.1
+ escape-string-regexp: 1.0.5
+ supports-color: 5.5.0
+ dev: true
+
+ /chalk@4.1.2:
+ resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==}
+ engines: {node: '>=10'}
+ dependencies:
+ ansi-styles: 4.3.0
+ supports-color: 7.2.0
dev: true
 
/chalk@5.3.0:
@@ -1290,8 +1761,8 @@ packages:
parse5-htmlparser2-tree-adapter: 7.0.0
dev: true
 
- /chokidar@3.5.3:
- resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==}
+ /chokidar@3.6.0:
+ resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==}
engines: {node: '>= 8.10.0'}
dependencies:
anymatch: 3.1.3
@@ -1305,6 +1776,18 @@ packages:
fsevents: 2.3.3
dev: true
 
+ /chrome-trace-event@1.0.3:
+ resolution: {integrity: sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==}
+ engines: {node: '>=6.0'}
+ dev: true
+
+ /clean-css@5.3.3:
+ resolution: {integrity: sha512-D5J+kHaVb/wKSFcyyV75uCn8fiY4sV38XJoe4CUyGQ+mOU/fMVYUdH1hJC+CJQ5uY3EnW27SbJYS4X8BiLrAFg==}
+ engines: {node: '>= 10.0'}
+ dependencies:
+ source-map: 0.6.1
+ dev: true
+
/cli-cursor@4.0.0:
resolution: {integrity: sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==}
engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
@@ -1317,11 +1800,131 @@ packages:
engines: {node: '>=6'}
dev: true
 
+ /clone-deep@4.0.1:
+ resolution: {integrity: sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==}
+ engines: {node: '>=6'}
+ dependencies:
+ is-plain-object: 2.0.4
+ kind-of: 6.0.3
+ shallow-clone: 3.0.1
+ dev: true
+
+ /color-convert@1.9.3:
+ resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==}
+ dependencies:
+ color-name: 1.1.3
+ dev: true
+
+ /color-convert@2.0.1:
+ resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
+ engines: {node: '>=7.0.0'}
+ dependencies:
+ color-name: 1.1.4
+ dev: true
+
+ /color-name@1.1.3:
+ resolution: {integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==}
+ dev: true
+
+ /color-name@1.1.4:
+ resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
+ dev: true
+
+ /colorette@2.0.20:
+ resolution: {integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==}
+ dev: true
+
+ /commander@2.20.3:
+ resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==}
+ dev: true
+
+ /commander@8.3.0:
+ resolution: {integrity: sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==}
+ engines: {node: '>= 12'}
+ dev: true
+
+ /compressible@2.0.18:
+ resolution: {integrity: sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==}
+ engines: {node: '>= 0.6'}
+ dependencies:
+ mime-db: 1.52.0
+ dev: true
+
+ /compression@1.7.4:
+ resolution: {integrity: sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==}
+ engines: {node: '>= 0.8.0'}
+ dependencies:
+ accepts: 1.3.8
+ bytes: 3.0.0
+ compressible: 2.0.18
+ debug: 2.6.9
+ on-headers: 1.0.2
+ safe-buffer: 5.1.2
+ vary: 1.1.2
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
/connect-history-api-fallback@2.0.0:
resolution: {integrity: sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==}
engines: {node: '>=0.8'}
dev: true
 
+ /content-disposition@0.5.4:
+ resolution: {integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==}
+ engines: {node: '>= 0.6'}
+ dependencies:
+ safe-buffer: 5.2.1
+ dev: true
+
+ /content-type@1.0.5:
+ resolution: {integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==}
+ engines: {node: '>= 0.6'}
+ dev: true
+
+ /cookie-signature@1.0.6:
+ resolution: {integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==}
+ dev: true
+
+ /cookie@0.6.0:
+ resolution: {integrity: sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==}
+ engines: {node: '>= 0.6'}
+ dev: true
+
+ /copy-webpack-plugin@12.0.2(webpack@5.91.0):
+ resolution: {integrity: sha512-SNwdBeHyII+rWvee/bTnAYyO8vfVdcSTud4EIb6jcZ8inLeWucJE0DnxXQBjlQ5zlteuuvooGQy3LIyGxhvlOA==}
+ engines: {node: '>= 18.12.0'}
+ peerDependencies:
+ webpack: ^5.1.0
+ dependencies:
+ fast-glob: 3.3.2
+ glob-parent: 6.0.2
+ globby: 14.0.1
+ normalize-path: 3.0.0
+ schema-utils: 4.2.0
+ serialize-javascript: 6.0.2
+ webpack: 5.91.0
+ dev: true
+
+ /core-util-is@1.0.3:
+ resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==}
+ dev: true
+
+ /cosmiconfig@9.0.0:
+ resolution: {integrity: sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ typescript: '>=4.9.5'
+ peerDependenciesMeta:
+ typescript:
+ optional: true
+ dependencies:
+ env-paths: 2.2.1
+ import-fresh: 3.3.0
+ js-yaml: 4.1.0
+ parse-json: 5.2.0
+ dev: true
+
/cross-spawn@7.0.3:
resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==}
engines: {node: '>= 8'}
@@ -1331,6 +1934,39 @@ packages:
which: 2.0.2
dev: true
 
+ /css-loader@6.10.0(webpack@5.91.0):
+ resolution: {integrity: sha512-LTSA/jWbwdMlk+rhmElbDR2vbtQoTBPr7fkJE+mxrHj+7ru0hUmHafDRzWIjIHTwpitWVaqY2/UWGRca3yUgRw==}
+ engines: {node: '>= 12.13.0'}
+ peerDependencies:
+ '@rspack/core': 0.x || 1.x
+ webpack: ^5.0.0
+ peerDependenciesMeta:
+ '@rspack/core':
+ optional: true
+ webpack:
+ optional: true
+ dependencies:
+ icss-utils: 5.1.0(postcss@8.4.38)
+ postcss: 8.4.38
+ postcss-modules-extract-imports: 3.0.0(postcss@8.4.38)
+ postcss-modules-local-by-default: 4.0.4(postcss@8.4.38)
+ postcss-modules-scope: 3.1.1(postcss@8.4.38)
+ postcss-modules-values: 4.0.0(postcss@8.4.38)
+ postcss-value-parser: 4.2.0
+ semver: 7.6.0
+ webpack: 5.91.0
+ dev: true
+
+ /css-select@4.3.0:
+ resolution: {integrity: sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ==}
+ dependencies:
+ boolbase: 1.0.0
+ css-what: 6.1.0
+ domhandler: 4.3.1
+ domutils: 2.8.0
+ nth-check: 2.1.1
+ dev: true
+
/css-select@5.1.0:
resolution: {integrity: sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==}
dependencies:
@@ -1341,17 +1977,45 @@ packages:
nth-check: 2.1.1
dev: true
 
+ /css-tree@2.2.1:
+ resolution: {integrity: sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA==}
+ engines: {node: ^10 || ^12.20.0 || ^14.13.0 || >=15.0.0, npm: '>=7.0.0'}
+ dependencies:
+ mdn-data: 2.0.28
+ source-map-js: 1.2.0
+ dev: true
+
/css-what@6.1.0:
resolution: {integrity: sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==}
engines: {node: '>= 6'}
dev: true
 
+ /cssesc@3.0.0:
+ resolution: {integrity: sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==}
+ engines: {node: '>=4'}
+ hasBin: true
+ dev: true
+
+ /csso@5.0.5:
+ resolution: {integrity: sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ==}
+ engines: {node: ^10 || ^12.20.0 || ^14.13.0 || >=15.0.0, npm: '>=7.0.0'}
+ dependencies:
+ css-tree: 2.2.1
+ dev: true
+
/csstype@3.1.3:
resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==}
dev: true
 
- /dayjs@1.11.10:
- resolution: {integrity: sha512-vjAczensTgRcqDERK0SR2XMwsF/tSvnvlv6VcF2GIhg6Sx4yOIt/irsr1RDJsKiIyBzJDpCoXiWWq28MqH2cnQ==}
+ /debug@2.6.9:
+ resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==}
+ peerDependencies:
+ supports-color: '*'
+ peerDependenciesMeta:
+ supports-color:
+ optional: true
+ dependencies:
+ ms: 2.0.0
dev: true
 
/debug@4.3.4:
@@ -1366,6 +2030,85 @@ packages:
ms: 2.1.2
dev: true
 
+ /deepmerge@1.5.2:
+ resolution: {integrity: sha512-95k0GDqvBjZavkuvzx/YqVLv/6YYa17fz6ILMSf7neqQITCPbnfEnQvEgMPNjH4kgobe7+WIL0yJEHku+H3qtQ==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /default-browser-id@5.0.0:
+ resolution: {integrity: sha512-A6p/pu/6fyBcA1TRz/GqWYPViplrftcW2gZC9q79ngNCKAeR/X3gcEdXQHl4KNXV+3wgIJ1CPkJQ3IHM6lcsyA==}
+ engines: {node: '>=18'}
+ dev: true
+
+ /default-browser@5.2.1:
+ resolution: {integrity: sha512-WY/3TUME0x3KPYdRRxEJJvXRHV4PyPoUsxtZa78lwItwRQRHhd2U9xOscaT/YTf8uCXIAjeJOFBVEh/7FtD8Xg==}
+ engines: {node: '>=18'}
+ dependencies:
+ bundle-name: 4.1.0
+ default-browser-id: 5.0.0
+ dev: true
+
+ /default-gateway@6.0.3:
+ resolution: {integrity: sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg==}
+ engines: {node: '>= 10'}
+ dependencies:
+ execa: 5.1.1
+ dev: true
+
+ /define-data-property@1.1.4:
+ resolution: {integrity: sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ es-define-property: 1.0.0
+ es-errors: 1.3.0
+ gopd: 1.0.1
+ dev: true
+
+ /define-lazy-prop@3.0.0:
+ resolution: {integrity: sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==}
+ engines: {node: '>=12'}
+ dev: true
+
+ /depd@1.1.2:
+ resolution: {integrity: sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==}
+ engines: {node: '>= 0.6'}
+ dev: true
+
+ /depd@2.0.0:
+ resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==}
+ engines: {node: '>= 0.8'}
+ dev: true
+
+ /destroy@1.2.0:
+ resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==}
+ engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16}
+ dev: true
+
+ /detect-node@2.1.0:
+ resolution: {integrity: sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==}
+ dev: true
+
+ /dns-packet@5.6.1:
+ resolution: {integrity: sha512-l4gcSouhcgIKRvyy99RNVOgxXiicE+2jZoNmaNmZ6JXiGajBOJAesk1OBlJuM5k2c+eudGdLxDqXuPCKIj6kpw==}
+ engines: {node: '>=6'}
+ dependencies:
+ '@leichtgewicht/ip-codec': 2.0.4
+ dev: true
+
+ /dom-converter@0.2.0:
+ resolution: {integrity: sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==}
+ dependencies:
+ utila: 0.4.0
+ dev: true
+
+ /dom-serializer@1.4.1:
+ resolution: {integrity: sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==}
+ dependencies:
+ domelementtype: 2.3.0
+ domhandler: 4.3.1
+ entities: 2.2.0
+ dev: true
+
/dom-serializer@2.0.0:
resolution: {integrity: sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==}
dependencies:
@@ -1378,6 +2121,13 @@ packages:
resolution: {integrity: sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==}
dev: true
 
+ /domhandler@4.3.1:
+ resolution: {integrity: sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==}
+ engines: {node: '>= 4'}
+ dependencies:
+ domelementtype: 2.3.0
+ dev: true
+
/domhandler@5.0.3:
resolution: {integrity: sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==}
engines: {node: '>= 4'}
@@ -1385,6 +2135,14 @@ packages:
domelementtype: 2.3.0
dev: true
 
+ /domutils@2.8.0:
+ resolution: {integrity: sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==}
+ dependencies:
+ dom-serializer: 1.4.1
+ domelementtype: 2.3.0
+ domhandler: 4.3.1
+ dev: true
+
/domutils@3.1.0:
resolution: {integrity: sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==}
dependencies:
@@ -1393,79 +2151,218 @@ packages:
domhandler: 5.0.3
dev: true
 
+ /dot-case@3.0.4:
+ resolution: {integrity: sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==}
+ dependencies:
+ no-case: 3.0.4
+ tslib: 2.6.2
+ dev: true
+
/eastasianwidth@0.2.0:
resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==}
dev: true
 
- /electron-to-chromium@1.4.611:
- resolution: {integrity: sha512-ZtRpDxrjHapOwxtv+nuth5ByB8clyn8crVynmRNGO3wG3LOp8RTcyZDqwaI6Ng6y8FCK2hVZmJoqwCskKbNMaw==}
+ /ee-first@1.1.1:
+ resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==}
+ dev: true
+
+ /electron-to-chromium@1.4.717:
+ resolution: {integrity: sha512-6Fmg8QkkumNOwuZ/5mIbMU9WI3H2fmn5ajcVya64I5Yr5CcNmO7vcLt0Y7c96DCiMO5/9G+4sI2r6eEvdg1F7A==}
dev: true
 
/emoji-regex@10.3.0:
resolution: {integrity: sha512-QpLs9D9v9kArv4lfDEgg1X/gN5XLnf/A6l9cs8SPZLRZR3ZkY9+kwIQTxm+fsSej5UMYGE8fdoaZVIBlqG0XTw==}
dev: true
 
- /entities@3.0.1:
- resolution: {integrity: sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q==}
- engines: {node: '>=0.12'}
+ /emoji-regex@8.0.0:
+ resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}
+ dev: true
+
+ /emoji-regex@9.2.2:
+ resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==}
+ dev: true
+
+ /emojis-list@3.0.0:
+ resolution: {integrity: sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==}
+ engines: {node: '>= 4'}
+ dev: true
+
+ /encodeurl@1.0.2:
+ resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==}
+ engines: {node: '>= 0.8'}
+ dev: true
+
+ /enhanced-resolve@5.16.0:
+ resolution: {integrity: sha512-O+QWCviPNSSLAD9Ucn8Awv+poAkqn3T1XY5/N7kR7rQO9yfSGWkYZDwpJ+iKF7B8rxaQKWngSqACpgzeapSyoA==}
+ engines: {node: '>=10.13.0'}
+ dependencies:
+ graceful-fs: 4.2.11
+ tapable: 2.2.1
+ dev: true
+
+ /entities@2.2.0:
+ resolution: {integrity: sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==}
+ dev: true
+
+ /entities@4.5.0:
+ resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==}
+ engines: {node: '>=0.12'}
+ dev: true
+
+ /env-paths@2.2.1:
+ resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==}
+ engines: {node: '>=6'}
+ dev: true
+
+ /envinfo@7.11.1:
+ resolution: {integrity: sha512-8PiZgZNIB4q/Lw4AhOvAfB/ityHAd2bli3lESSWmWSzSsl5dKpy5N1d1Rfkd2teq/g9xN90lc6o98DOjMeYHpg==}
+ engines: {node: '>=4'}
+ hasBin: true
+ dev: true
+
+ /error-ex@1.3.2:
+ resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==}
+ dependencies:
+ is-arrayish: 0.2.1
+ dev: true
+
+ /es-define-property@1.0.0:
+ resolution: {integrity: sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ get-intrinsic: 1.2.4
+ dev: true
+
+ /es-errors@1.3.0:
+ resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==}
+ engines: {node: '>= 0.4'}
dev: true
 
- /entities@4.5.0:
- resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==}
- engines: {node: '>=0.12'}
+ /es-module-lexer@1.5.0:
+ resolution: {integrity: sha512-pqrTKmwEIgafsYZAGw9kszYzmagcE/n4dbgwGWLEXg7J4QFJVQRBld8j3Q3GNez79jzxZshq0bcT962QHOghjw==}
dev: true
 
- /envinfo@7.11.0:
- resolution: {integrity: sha512-G9/6xF1FPbIw0TtalAMaVPpiq2aDEuKLXM314jPVAO9r2fo2a4BLqMNkmRS7O/xPPZ+COAhGIz3ETvHEV3eUcg==}
- engines: {node: '>=4'}
- hasBin: true
+ /esbuild-loader@4.1.0(webpack@5.91.0):
+ resolution: {integrity: sha512-543TtIvqbqouEMlOHg4xKoDQkmdImlwIpyAIgpUtDPvMuklU/c2k+Qt2O3VeDBgAwozxmlEbjOzV+F8CZ0g+Bw==}
+ peerDependencies:
+ webpack: ^4.40.0 || ^5.0.0
+ dependencies:
+ esbuild: 0.20.2
+ get-tsconfig: 4.7.3
+ loader-utils: 2.0.4
+ webpack: 5.91.0
+ webpack-sources: 1.4.3
dev: true
 
- /esbuild@0.19.9:
- resolution: {integrity: sha512-U9CHtKSy+EpPsEBa+/A2gMs/h3ylBC0H0KSqIg7tpztHerLi6nrrcoUJAkNCEPumx8yJ+Byic4BVwHgRbN0TBg==}
+ /esbuild@0.20.2:
+ resolution: {integrity: sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==}
engines: {node: '>=12'}
hasBin: true
requiresBuild: true
optionalDependencies:
- '@esbuild/android-arm': 0.19.9
- '@esbuild/android-arm64': 0.19.9
- '@esbuild/android-x64': 0.19.9
- '@esbuild/darwin-arm64': 0.19.9
- '@esbuild/darwin-x64': 0.19.9
- '@esbuild/freebsd-arm64': 0.19.9
- '@esbuild/freebsd-x64': 0.19.9
- '@esbuild/linux-arm': 0.19.9
- '@esbuild/linux-arm64': 0.19.9
- '@esbuild/linux-ia32': 0.19.9
- '@esbuild/linux-loong64': 0.19.9
- '@esbuild/linux-mips64el': 0.19.9
- '@esbuild/linux-ppc64': 0.19.9
- '@esbuild/linux-riscv64': 0.19.9
- '@esbuild/linux-s390x': 0.19.9
- '@esbuild/linux-x64': 0.19.9
- '@esbuild/netbsd-x64': 0.19.9
- '@esbuild/openbsd-x64': 0.19.9
- '@esbuild/sunos-x64': 0.19.9
- '@esbuild/win32-arm64': 0.19.9
- '@esbuild/win32-ia32': 0.19.9
- '@esbuild/win32-x64': 0.19.9
- dev: true
-
- /escalade@3.1.1:
- resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==}
+ '@esbuild/aix-ppc64': 0.20.2
+ '@esbuild/android-arm': 0.20.2
+ '@esbuild/android-arm64': 0.20.2
+ '@esbuild/android-x64': 0.20.2
+ '@esbuild/darwin-arm64': 0.20.2
+ '@esbuild/darwin-x64': 0.20.2
+ '@esbuild/freebsd-arm64': 0.20.2
+ '@esbuild/freebsd-x64': 0.20.2
+ '@esbuild/linux-arm': 0.20.2
+ '@esbuild/linux-arm64': 0.20.2
+ '@esbuild/linux-ia32': 0.20.2
+ '@esbuild/linux-loong64': 0.20.2
+ '@esbuild/linux-mips64el': 0.20.2
+ '@esbuild/linux-ppc64': 0.20.2
+ '@esbuild/linux-riscv64': 0.20.2
+ '@esbuild/linux-s390x': 0.20.2
+ '@esbuild/linux-x64': 0.20.2
+ '@esbuild/netbsd-x64': 0.20.2
+ '@esbuild/openbsd-x64': 0.20.2
+ '@esbuild/sunos-x64': 0.20.2
+ '@esbuild/win32-arm64': 0.20.2
+ '@esbuild/win32-ia32': 0.20.2
+ '@esbuild/win32-x64': 0.20.2
+ dev: true
+
+ /escalade@3.1.2:
+ resolution: {integrity: sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==}
engines: {node: '>=6'}
dev: true
 
+ /escape-html@1.0.3:
+ resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==}
+ dev: true
+
+ /escape-string-regexp@1.0.5:
+ resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==}
+ engines: {node: '>=0.8.0'}
+ dev: true
+
+ /eslint-scope@5.1.1:
+ resolution: {integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==}
+ engines: {node: '>=8.0.0'}
+ dependencies:
+ esrecurse: 4.3.0
+ estraverse: 4.3.0
+ dev: true
+
/esprima@4.0.1:
resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==}
engines: {node: '>=4'}
hasBin: true
dev: true
 
+ /esrecurse@4.3.0:
+ resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==}
+ engines: {node: '>=4.0'}
+ dependencies:
+ estraverse: 5.3.0
+ dev: true
+
+ /estraverse@4.3.0:
+ resolution: {integrity: sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==}
+ engines: {node: '>=4.0'}
+ dev: true
+
+ /estraverse@5.3.0:
+ resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==}
+ engines: {node: '>=4.0'}
+ dev: true
+
/estree-walker@2.0.2:
resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==}
dev: true
 
+ /etag@1.8.1:
+ resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==}
+ engines: {node: '>= 0.6'}
+ dev: true
+
+ /eventemitter3@4.0.7:
+ resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==}
+ dev: true
+
+ /events@3.3.0:
+ resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==}
+ engines: {node: '>=0.8.x'}
+ dev: true
+
+ /execa@5.1.1:
+ resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==}
+ engines: {node: '>=10'}
+ dependencies:
+ cross-spawn: 7.0.3
+ get-stream: 6.0.1
+ human-signals: 2.1.0
+ is-stream: 2.0.1
+ merge-stream: 2.0.0
+ npm-run-path: 4.0.1
+ onetime: 5.1.2
+ signal-exit: 3.0.7
+ strip-final-newline: 2.0.0
+ dev: true
+
/execa@8.0.1:
resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==}
engines: {node: '>=16.17'}
@@ -1475,12 +2372,51 @@ packages:
human-signals: 5.0.0
is-stream: 3.0.0
merge-stream: 2.0.0
- npm-run-path: 5.1.0
+ npm-run-path: 5.3.0
onetime: 6.0.0
signal-exit: 4.1.0
strip-final-newline: 3.0.0
dev: true
 
+ /express@4.19.2:
+ resolution: {integrity: sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==}
+ engines: {node: '>= 0.10.0'}
+ dependencies:
+ accepts: 1.3.8
+ array-flatten: 1.1.1
+ body-parser: 1.20.2
+ content-disposition: 0.5.4
+ content-type: 1.0.5
+ cookie: 0.6.0
+ cookie-signature: 1.0.6
+ debug: 2.6.9
+ depd: 2.0.0
+ encodeurl: 1.0.2
+ escape-html: 1.0.3
+ etag: 1.8.1
+ finalhandler: 1.2.0
+ fresh: 0.5.2
+ http-errors: 2.0.0
+ merge-descriptors: 1.0.1
+ methods: 1.1.2
+ on-finished: 2.4.1
+ parseurl: 1.3.3
+ path-to-regexp: 0.1.7
+ proxy-addr: 2.0.7
+ qs: 6.11.0
+ range-parser: 1.2.1
+ safe-buffer: 5.2.1
+ send: 0.18.0
+ serve-static: 1.15.0
+ setprototypeof: 1.2.0
+ statuses: 2.0.1
+ type-is: 1.6.18
+ utils-merge: 1.0.1
+ vary: 1.1.2
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
/extend-shallow@2.0.1:
resolution: {integrity: sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==}
engines: {node: '>=0.10.0'}
@@ -1488,6 +2424,10 @@ packages:
is-extendable: 0.1.1
dev: true
 
+ /fast-deep-equal@3.1.3:
+ resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==}
+ dev: true
+
/fast-glob@3.3.2:
resolution: {integrity: sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==}
engines: {node: '>=8.6.0'}
@@ -1499,14 +2439,25 @@ packages:
micromatch: 4.0.5
dev: true
 
- /fastq@1.15.0:
- resolution: {integrity: sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==}
+ /fast-json-stable-stringify@2.1.0:
+ resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==}
+ dev: true
+
+ /fastq@1.17.1:
+ resolution: {integrity: sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==}
dependencies:
reusify: 1.0.4
dev: true
 
- /fflate@0.8.1:
- resolution: {integrity: sha512-/exOvEuc+/iaUm105QIiOt4LpBdMTWsXxqR0HDF35vx3fmaKzw7354gTilCh5rkzEt8WYyG//ku3h3nRmd7CHQ==}
+ /faye-websocket@0.11.4:
+ resolution: {integrity: sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==}
+ engines: {node: '>=0.8.0'}
+ dependencies:
+ websocket-driver: 0.7.4
+ dev: true
+
+ /fflate@0.8.2:
+ resolution: {integrity: sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==}
dev: true
 
/fill-range@7.0.1:
@@ -1516,10 +2467,58 @@ packages:
to-regex-range: 5.0.1
dev: true
 
+ /finalhandler@1.2.0:
+ resolution: {integrity: sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==}
+ engines: {node: '>= 0.8'}
+ dependencies:
+ debug: 2.6.9
+ encodeurl: 1.0.2
+ escape-html: 1.0.3
+ on-finished: 2.4.1
+ parseurl: 1.3.3
+ statuses: 2.0.1
+ unpipe: 1.0.0
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /flat@5.0.2:
+ resolution: {integrity: sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==}
+ hasBin: true
+ dev: true
+
+ /follow-redirects@1.15.6:
+ resolution: {integrity: sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==}
+ engines: {node: '>=4.0'}
+ peerDependencies:
+ debug: '*'
+ peerDependenciesMeta:
+ debug:
+ optional: true
+ dev: true
+
+ /foreground-child@3.1.1:
+ resolution: {integrity: sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==}
+ engines: {node: '>=14'}
+ dependencies:
+ cross-spawn: 7.0.3
+ signal-exit: 4.1.0
+ dev: true
+
+ /forwarded@0.2.0:
+ resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==}
+ engines: {node: '>= 0.6'}
+ dev: true
+
/fraction.js@4.3.7:
resolution: {integrity: sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==}
dev: true
 
+ /fresh@0.5.2:
+ resolution: {integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==}
+ engines: {node: '>= 0.6'}
+ dev: true
+
/fs-extra@11.2.0:
resolution: {integrity: sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==}
engines: {node: '>=14.14'}
@@ -1537,11 +2536,42 @@ packages:
dev: true
optional: true
 
+ /function-bind@1.1.2:
+ resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==}
+ dev: true
+
+ /get-east-asian-width@1.2.0:
+ resolution: {integrity: sha512-2nk+7SIVb14QrgXFHcm84tD4bKQz0RxPuMT8Ag5KPOq7J5fEmAg0UbXdTOSHqNuHSU28k55qnceesxXRZGzKWA==}
+ engines: {node: '>=18'}
+ dev: true
+
+ /get-intrinsic@1.2.4:
+ resolution: {integrity: sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ es-errors: 1.3.0
+ function-bind: 1.1.2
+ has-proto: 1.0.3
+ has-symbols: 1.0.3
+ hasown: 2.0.2
+ dev: true
+
+ /get-stream@6.0.1:
+ resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==}
+ engines: {node: '>=10'}
+ dev: true
+
/get-stream@8.0.1:
resolution: {integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==}
engines: {node: '>=16'}
dev: true
 
+ /get-tsconfig@4.7.3:
+ resolution: {integrity: sha512-ZvkrzoUA0PQZM6fy6+/Hce561s+faD1rsNwhnO5FelNjyy7EMGJ3Rz1AQ8GYDWjhRs/7dBLOEJvhK8MiEJOAFg==}
+ dependencies:
+ resolve-pkg-maps: 1.0.0
+ dev: true
+
/glob-parent@5.1.2:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'}
@@ -1549,18 +2579,47 @@ packages:
is-glob: 4.0.3
dev: true
 
- /globby@14.0.0:
- resolution: {integrity: sha512-/1WM/LNHRAOH9lZta77uGbq0dAEQM+XjNesWwhlERDVenqothRbnzTrL3/LrIoEPPjeUHC3vrS6TwoyxeHs7MQ==}
+ /glob-parent@6.0.2:
+ resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==}
+ engines: {node: '>=10.13.0'}
+ dependencies:
+ is-glob: 4.0.3
+ dev: true
+
+ /glob-to-regexp@0.4.1:
+ resolution: {integrity: sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==}
+ dev: true
+
+ /glob@10.3.10:
+ resolution: {integrity: sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g==}
+ engines: {node: '>=16 || 14 >=14.17'}
+ hasBin: true
+ dependencies:
+ foreground-child: 3.1.1
+ jackspeak: 2.3.6
+ minimatch: 9.0.3
+ minipass: 7.0.4
+ path-scurry: 1.10.1
+ dev: true
+
+ /globby@14.0.1:
+ resolution: {integrity: sha512-jOMLD2Z7MAhyG8aJpNOpmziMOP4rPLcc95oQPKXBazW82z+CEgPFBQvEpRUa1KeIMUJo4Wsm+q6uzO/Q/4BksQ==}
engines: {node: '>=18'}
dependencies:
- '@sindresorhus/merge-streams': 1.0.0
+ '@sindresorhus/merge-streams': 2.3.0
fast-glob: 3.3.2
- ignore: 5.3.0
+ ignore: 5.3.1
path-type: 5.0.0
slash: 5.1.0
unicorn-magic: 0.1.0
dev: true
 
+ /gopd@1.0.1:
+ resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==}
+ dependencies:
+ get-intrinsic: 1.2.4
+ dev: true
+
/graceful-fs@4.2.11:
resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
dev: true
@@ -1575,10 +2634,108 @@ packages:
strip-bom-string: 1.0.0
dev: true
 
+ /handle-thing@2.0.1:
+ resolution: {integrity: sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==}
+ dev: true
+
+ /has-flag@3.0.0:
+ resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==}
+ engines: {node: '>=4'}
+ dev: true
+
+ /has-flag@4.0.0:
+ resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /has-property-descriptors@1.0.2:
+ resolution: {integrity: sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==}
+ dependencies:
+ es-define-property: 1.0.0
+ dev: true
+
+ /has-proto@1.0.3:
+ resolution: {integrity: sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==}
+ engines: {node: '>= 0.4'}
+ dev: true
+
+ /has-symbols@1.0.3:
+ resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==}
+ engines: {node: '>= 0.4'}
+ dev: true
+
/hash-sum@2.0.0:
resolution: {integrity: sha512-WdZTbAByD+pHfl/g9QSsBIIwy8IT+EsPiKDs0KNX+zSHhdDLFKdZu0BQHljvO+0QI/BasbMSUa8wYNCZTvhslg==}
dev: true
 
+ /hasown@2.0.2:
+ resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ function-bind: 1.1.2
+ dev: true
+
+ /he@1.2.0:
+ resolution: {integrity: sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==}
+ hasBin: true
+ dev: true
+
+ /hpack.js@2.1.6:
+ resolution: {integrity: sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ==}
+ dependencies:
+ inherits: 2.0.4
+ obuf: 1.1.2
+ readable-stream: 2.3.8
+ wbuf: 1.7.3
+ dev: true
+
+ /html-entities@2.5.2:
+ resolution: {integrity: sha512-K//PSRMQk4FZ78Kyau+mZurHn3FH0Vwr+H36eE0rPbeYkRRi9YxceYPhuN60UwWorxyKHhqoAJl2OFKa4BVtaA==}
+ dev: true
+
+ /html-minifier-terser@6.1.0:
+ resolution: {integrity: sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw==}
+ engines: {node: '>=12'}
+ hasBin: true
+ dependencies:
+ camel-case: 4.1.2
+ clean-css: 5.3.3
+ commander: 8.3.0
+ he: 1.2.0
+ param-case: 3.0.4
+ relateurl: 0.2.7
+ terser: 5.29.2
+ dev: true
+
+ /html-webpack-plugin@5.6.0(webpack@5.91.0):
+ resolution: {integrity: sha512-iwaY4wzbe48AfKLZ/Cc8k0L+FKG6oSNRaZ8x5A/T/IVDGyXcbHncM9TdDa93wn0FsSm82FhTKW7f3vS61thXAw==}
+ engines: {node: '>=10.13.0'}
+ peerDependencies:
+ '@rspack/core': 0.x || 1.x
+ webpack: ^5.20.0
+ peerDependenciesMeta:
+ '@rspack/core':
+ optional: true
+ webpack:
+ optional: true
+ dependencies:
+ '@types/html-minifier-terser': 6.1.0
+ html-minifier-terser: 6.1.0
+ lodash: 4.17.21
+ pretty-error: 4.0.0
+ tapable: 2.2.1
+ webpack: 5.91.0
+ dev: true
+
+ /htmlparser2@6.1.0:
+ resolution: {integrity: sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A==}
+ dependencies:
+ domelementtype: 2.3.0
+ domhandler: 4.3.1
+ domutils: 2.8.0
+ entities: 2.2.0
+ dev: true
+
/htmlparser2@8.0.2:
resolution: {integrity: sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==}
dependencies:
@@ -1588,74 +2745,269 @@ packages:
entities: 4.5.0
dev: true
 
+ /http-deceiver@1.2.7:
+ resolution: {integrity: sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw==}
+ dev: true
+
+ /http-errors@1.6.3:
+ resolution: {integrity: sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A==}
+ engines: {node: '>= 0.6'}
+ dependencies:
+ depd: 1.1.2
+ inherits: 2.0.3
+ setprototypeof: 1.1.0
+ statuses: 1.5.0
+ dev: true
+
+ /http-errors@2.0.0:
+ resolution: {integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==}
+ engines: {node: '>= 0.8'}
+ dependencies:
+ depd: 2.0.0
+ inherits: 2.0.4
+ setprototypeof: 1.2.0
+ statuses: 2.0.1
+ toidentifier: 1.0.1
+ dev: true
+
+ /http-parser-js@0.5.8:
+ resolution: {integrity: sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q==}
+ dev: true
+
+ /http-proxy-middleware@2.0.6(@types/express@4.17.21):
+ resolution: {integrity: sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw==}
+ engines: {node: '>=12.0.0'}
+ peerDependencies:
+ '@types/express': ^4.17.13
+ peerDependenciesMeta:
+ '@types/express':
+ optional: true
+ dependencies:
+ '@types/express': 4.17.21
+ '@types/http-proxy': 1.17.14
+ http-proxy: 1.18.1
+ is-glob: 4.0.3
+ is-plain-obj: 3.0.0
+ micromatch: 4.0.5
+ transitivePeerDependencies:
+ - debug
+ dev: true
+
+ /http-proxy@1.18.1:
+ resolution: {integrity: sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==}
+ engines: {node: '>=8.0.0'}
+ dependencies:
+ eventemitter3: 4.0.7
+ follow-redirects: 1.15.6
+ requires-port: 1.0.0
+ transitivePeerDependencies:
+ - debug
+ dev: true
+
+ /human-signals@2.1.0:
+ resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==}
+ engines: {node: '>=10.17.0'}
+ dev: true
+
/human-signals@5.0.0:
resolution: {integrity: sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==}
engines: {node: '>=16.17.0'}
dev: true
 
- /ieee754@1.2.1:
- resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==}
+ /iconv-lite@0.4.24:
+ resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==}
+ engines: {node: '>=0.10.0'}
+ dependencies:
+ safer-buffer: 2.1.2
+ dev: true
+
+ /icss-utils@5.1.0(postcss@8.4.38):
+ resolution: {integrity: sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==}
+ engines: {node: ^10 || ^12 || >= 14}
+ peerDependencies:
+ postcss: ^8.1.0
+ dependencies:
+ postcss: 8.4.38
dev: true
 
- /ignore@5.3.0:
- resolution: {integrity: sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg==}
+ /ignore@5.3.1:
+ resolution: {integrity: sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==}
engines: {node: '>= 4'}
dev: true
 
- /immutable@4.3.4:
- resolution: {integrity: sha512-fsXeu4J4i6WNWSikpI88v/PcVflZz+6kMhUfIwc5SY+poQRPnaf5V7qds6SUyUN3cVxEzuCab7QIoLOQ+DQ1wA==}
+ /immutable@4.3.5:
+ resolution: {integrity: sha512-8eabxkth9gZatlwl5TBuJnCsoTADlL6ftEr7A4qgdaTsPyreilDSnUk57SO+jfKcNtxPa22U5KK6DSeAYhpBJw==}
+ dev: true
+
+ /import-fresh@3.3.0:
+ resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==}
+ engines: {node: '>=6'}
+ dependencies:
+ parent-module: 1.0.1
+ resolve-from: 4.0.0
+ dev: true
+
+ /inherits@2.0.3:
+ resolution: {integrity: sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==}
dev: true
 
/inherits@2.0.4:
resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==}
dev: true
 
- /is-binary-path@2.1.0:
- resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==}
- engines: {node: '>=8'}
+ /ipaddr.js@1.9.1:
+ resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==}
+ engines: {node: '>= 0.10'}
+ dev: true
+
+ /ipaddr.js@2.1.0:
+ resolution: {integrity: sha512-LlbxQ7xKzfBusov6UMi4MFpEg0m+mAm9xyNGEduwXMEDuf4WfzB/RZwMVYEd7IKGvh4IUkEXYxtAVu9T3OelJQ==}
+ engines: {node: '>= 10'}
+ dev: true
+
+ /is-arrayish@0.2.1:
+ resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==}
+ dev: true
+
+ /is-binary-path@2.1.0:
+ resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==}
+ engines: {node: '>=8'}
+ dependencies:
+ binary-extensions: 2.3.0
+ dev: true
+
+ /is-docker@3.0.0:
+ resolution: {integrity: sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==}
+ engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+ hasBin: true
+ dev: true
+
+ /is-extendable@0.1.1:
+ resolution: {integrity: sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /is-extglob@2.1.1:
+ resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /is-fullwidth-code-point@3.0.0:
+ resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /is-glob@4.0.3:
+ resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
+ engines: {node: '>=0.10.0'}
+ dependencies:
+ is-extglob: 2.1.1
+ dev: true
+
+ /is-inside-container@1.0.0:
+ resolution: {integrity: sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==}
+ engines: {node: '>=14.16'}
+ hasBin: true
+ dependencies:
+ is-docker: 3.0.0
+ dev: true
+
+ /is-interactive@2.0.0:
+ resolution: {integrity: sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==}
+ engines: {node: '>=12'}
+ dev: true
+
+ /is-network-error@1.1.0:
+ resolution: {integrity: sha512-tUdRRAnhT+OtCZR/LxZelH/C7QtjtFrTu5tXCA8pl55eTUElUHT+GPYV8MBMBvea/j+NxQqVt3LbWMRir7Gx9g==}
+ engines: {node: '>=16'}
+ dev: true
+
+ /is-number@7.0.0:
+ resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
+ engines: {node: '>=0.12.0'}
+ dev: true
+
+ /is-plain-obj@3.0.0:
+ resolution: {integrity: sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==}
+ engines: {node: '>=10'}
+ dev: true
+
+ /is-plain-object@2.0.4:
+ resolution: {integrity: sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==}
+ engines: {node: '>=0.10.0'}
+ dependencies:
+ isobject: 3.0.1
+ dev: true
+
+ /is-stream@2.0.1:
+ resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /is-stream@3.0.0:
+ resolution: {integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==}
+ engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+ dev: true
+
+ /is-unicode-supported@1.3.0:
+ resolution: {integrity: sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==}
+ engines: {node: '>=12'}
+ dev: true
+
+ /is-unicode-supported@2.0.0:
+ resolution: {integrity: sha512-FRdAyx5lusK1iHG0TWpVtk9+1i+GjrzRffhDg4ovQ7mcidMQ6mj+MhKPmvh7Xwyv5gIS06ns49CA7Sqg7lC22Q==}
+ engines: {node: '>=18'}
+ dev: true
+
+ /is-wsl@3.1.0:
+ resolution: {integrity: sha512-UcVfVfaK4Sc4m7X3dUSoHoozQGBEFeDC+zVo06t98xe8CzHSZZBekNXH+tu0NalHolcJ/QAGqS46Hef7QXBIMw==}
+ engines: {node: '>=16'}
dependencies:
- binary-extensions: 2.2.0
+ is-inside-container: 1.0.0
dev: true
- /is-extendable@0.1.1:
- resolution: {integrity: sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==}
- engines: {node: '>=0.10.0'}
+ /isarray@1.0.0:
+ resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==}
dev: true
- /is-extglob@2.1.1:
- resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
- engines: {node: '>=0.10.0'}
+ /isexe@2.0.0:
+ resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==}
dev: true
- /is-glob@4.0.3:
- resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
+ /isobject@3.0.1:
+ resolution: {integrity: sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==}
engines: {node: '>=0.10.0'}
- dependencies:
- is-extglob: 2.1.1
dev: true
- /is-interactive@2.0.0:
- resolution: {integrity: sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==}
- engines: {node: '>=12'}
+ /jackspeak@2.3.6:
+ resolution: {integrity: sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==}
+ engines: {node: '>=14'}
+ dependencies:
+ '@isaacs/cliui': 8.0.2
+ optionalDependencies:
+ '@pkgjs/parseargs': 0.11.0
dev: true
- /is-number@7.0.0:
- resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
- engines: {node: '>=0.12.0'}
+ /javascript-stringify@2.1.0:
+ resolution: {integrity: sha512-JVAfqNPTvNq3sB/VHQJAFxN/sPgKnsKrCwyRt15zwNCdrMMJDdcEOdubuy+DuJYYdm0ox1J4uzEuYKkN+9yhVg==}
dev: true
- /is-stream@3.0.0:
- resolution: {integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==}
- engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+ /jest-worker@27.5.1:
+ resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==}
+ engines: {node: '>= 10.13.0'}
+ dependencies:
+ '@types/node': 20.11.30
+ merge-stream: 2.0.0
+ supports-color: 8.1.1
dev: true
- /is-unicode-supported@1.3.0:
- resolution: {integrity: sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==}
- engines: {node: '>=12'}
+ /jiti@1.21.0:
+ resolution: {integrity: sha512-gFqAIbuKyyso/3G2qhiO2OM6shY6EPP/R0+mkDbyspxKazh8BXDC5FiFsUjlczgdNz/vfra0da2y+aHrusLG/Q==}
+ hasBin: true
dev: true
- /isexe@2.0.0:
- resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==}
+ /js-tokens@4.0.0:
+ resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==}
dev: true
/js-yaml@3.14.1:
@@ -1666,6 +3018,31 @@ packages:
esprima: 4.0.1
dev: true
+ /js-yaml@4.1.0:
+ resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==}
+ hasBin: true
+ dependencies:
+ argparse: 2.0.1
+ dev: true
+
+ /json-parse-even-better-errors@2.3.1:
+ resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==}
+ dev: true
+
+ /json-schema-traverse@0.4.1:
+ resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==}
+ dev: true
+
+ /json-schema-traverse@1.0.0:
+ resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==}
+ dev: true
+
+ /json5@2.2.3:
+ resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==}
+ engines: {node: '>=6'}
+ hasBin: true
+ dev: true
+
/jsonfile@6.1.0:
resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==}
dependencies:
@@ -1679,25 +3056,60 @@ packages:
engines: {node: '>=0.10.0'}
dev: true
- /lilconfig@3.0.0:
- resolution: {integrity: sha512-K2U4W2Ff5ibV7j7ydLr+zLAkIg5JJ4lPn1Ltsdt+Tz/IjQ8buJ55pZAxoP34lqIiwtF9iAvtLv3JGv7CAyAg+g==}
- engines: {node: '>=14'}
+ /launch-editor@2.6.1:
+ resolution: {integrity: sha512-eB/uXmFVpY4zezmGp5XtU21kwo7GBbKB+EQ+UZeWtGb9yAM5xt/Evk+lYH3eRNAtId+ej4u7TYPFZ07w4s7rRw==}
+ dependencies:
+ picocolors: 1.0.0
+ shell-quote: 1.8.1
dev: true
- /linkify-it@4.0.1:
- resolution: {integrity: sha512-C7bfi1UZmoj8+PQx22XyeXCuBlokoyWQL5pWSP+EI6nzRylyThouddufc2c1NDIcP9k5agmN9fLpA7VNJfIiqw==}
+ /lines-and-columns@1.2.4:
+ resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
+ dev: true
+
+ /linkify-it@5.0.0:
+ resolution: {integrity: sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==}
dependencies:
- uc.micro: 1.0.6
+ uc.micro: 2.1.0
dev: true
- /log-symbols@5.1.0:
- resolution: {integrity: sha512-l0x2DvrW294C9uDCoQe1VSU4gf529FkSZ6leBl4TiqZH/e+0R7hSfHQBNut2mNygDgHwvYHfFLn6Oxb3VWj2rA==}
- engines: {node: '>=12'}
+ /loader-runner@4.3.0:
+ resolution: {integrity: sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==}
+ engines: {node: '>=6.11.5'}
+ dev: true
+
+ /loader-utils@2.0.4:
+ resolution: {integrity: sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==}
+ engines: {node: '>=8.9.0'}
+ dependencies:
+ big.js: 5.2.2
+ emojis-list: 3.0.0
+ json5: 2.2.3
+ dev: true
+
+ /lodash@4.17.21:
+ resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==}
+ dev: true
+
+ /log-symbols@6.0.0:
+ resolution: {integrity: sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw==}
+ engines: {node: '>=18'}
dependencies:
chalk: 5.3.0
is-unicode-supported: 1.3.0
dev: true
+ /lower-case@2.0.2:
+ resolution: {integrity: sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==}
+ dependencies:
+ tslib: 2.6.2
+ dev: true
+
+ /lru-cache@10.2.0:
+ resolution: {integrity: sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q==}
+ engines: {node: 14 || >=16.14}
+ dev: true
+
/lru-cache@6.0.0:
resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==}
engines: {node: '>=10'}
@@ -1705,50 +3117,71 @@ packages:
yallist: 4.0.0
dev: true
- /magic-string@0.30.5:
- resolution: {integrity: sha512-7xlpfBaQaP/T6Vh8MO/EqXSW5En6INHEvEXQiuff7Gku0PWjU3uf6w/j9o7O+SpB5fOAkrI5HeoNgwjEO0pFsA==}
+ /magic-string@0.30.8:
+ resolution: {integrity: sha512-ISQTe55T2ao7XtlAStud6qwYPZjE4GK1S/BeVPus4jrq6JuOnQ00YKQC581RWhR122W7msZV263KzVeLoqidyQ==}
engines: {node: '>=12'}
dependencies:
'@jridgewell/sourcemap-codec': 1.4.15
dev: true
- /markdown-it-anchor@8.6.7(@types/markdown-it@13.0.7)(markdown-it@13.0.2):
+ /markdown-it-anchor@8.6.7(@types/markdown-it@13.0.7)(markdown-it@14.1.0):
resolution: {integrity: sha512-FlCHFwNnutLgVTflOYHPW2pPcl2AACqVzExlkGQNsi4CJgqOHN7YTgDd4LuhgN1BFO3TS0vLAruV1Td6dwWPJA==}
peerDependencies:
'@types/markdown-it': '*'
markdown-it: '*'
dependencies:
'@types/markdown-it': 13.0.7
- markdown-it: 13.0.2
+ markdown-it: 14.1.0
dev: true
- /markdown-it-container@3.0.0:
- resolution: {integrity: sha512-y6oKTq4BB9OQuY/KLfk/O3ysFhB3IMYoIWhGJEidXt1NQFocFK2sA2t0NYZAMyMShAGL6x5OPIbrmXPIqaN9rw==}
+ /markdown-it-container@4.0.0:
+ resolution: {integrity: sha512-HaNccxUH0l7BNGYbFbjmGpf5aLHAMTinqRZQAEQbMr2cdD3z91Q6kIo1oUn1CQndkT03jat6ckrdRYuwwqLlQw==}
dev: true
- /markdown-it-emoji@2.0.2:
- resolution: {integrity: sha512-zLftSaNrKuYl0kR5zm4gxXjHaOI3FAOEaloKmRA5hijmJZvSjmxcokOLlzycb/HXlUFWzXqpIEoyEMCE4i9MvQ==}
+ /markdown-it-emoji@3.0.0:
+ resolution: {integrity: sha512-+rUD93bXHubA4arpEZO3q80so0qgoFJEKRkRbjKX8RTdca89v2kfyF+xR3i2sQTwql9tpPZPOQN5B+PunspXRg==}
dev: true
- /markdown-it@13.0.2:
- resolution: {integrity: sha512-FtwnEuuK+2yVU7goGn/MJ0WBZMM9ZPgU9spqlFs7/A/pDIUNSOQZhUgOqYCficIuR2QaFnrt8LHqBWsbTAoI5w==}
+ /markdown-it@14.1.0:
+ resolution: {integrity: sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==}
hasBin: true
dependencies:
argparse: 2.0.1
- entities: 3.0.1
- linkify-it: 4.0.1
- mdurl: 1.0.1
- uc.micro: 1.0.6
+ entities: 4.5.0
+ linkify-it: 5.0.0
+ mdurl: 2.0.0
+ punycode.js: 2.3.1
+ uc.micro: 2.1.0
+ dev: true
+
+ /mdn-data@2.0.28:
+ resolution: {integrity: sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g==}
+ dev: true
+
+ /mdurl@2.0.0:
+ resolution: {integrity: sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==}
dev: true
- /mdurl@1.0.1:
- resolution: {integrity: sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==}
+ /media-typer@0.3.0:
+ resolution: {integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==}
+ engines: {node: '>= 0.6'}
dev: true
/medium-zoom@1.1.0:
resolution: {integrity: sha512-ewyDsp7k4InCUp3jRmwHBRFGyjBimKps/AJLjRSox+2q/2H4p/PNpQf+pwONWlJiOudkBXtbdmVbFjqyybfTmQ==}
dev: true
+ /memfs@4.8.0:
+ resolution: {integrity: sha512-fcs7trFxZlOMadmTw5nyfOwS3il9pr3y+6xzLfXNwmuR/D0i4wz6rJURxArAbcJDGalbpbMvQ/IFI0NojRZgRg==}
+ engines: {node: '>= 4.0.0'}
+ dependencies:
+ tslib: 2.6.2
+ dev: true
+
+ /merge-descriptors@1.0.1:
+ resolution: {integrity: sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==}
+ dev: true
+
/merge-stream@2.0.0:
resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==}
dev: true
@@ -1758,6 +3191,11 @@ packages:
engines: {node: '>= 8'}
dev: true
+ /methods@1.1.2:
+ resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==}
+ engines: {node: '>= 0.6'}
+ dev: true
+
/micromatch@4.0.5:
resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==}
engines: {node: '>=8.6'}
@@ -1766,6 +3204,24 @@ packages:
picomatch: 2.3.1
dev: true
+ /mime-db@1.52.0:
+ resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==}
+ engines: {node: '>= 0.6'}
+ dev: true
+
+ /mime-types@2.1.35:
+ resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==}
+ engines: {node: '>= 0.6'}
+ dependencies:
+ mime-db: 1.52.0
+ dev: true
+
+ /mime@1.6.0:
+ resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==}
+ engines: {node: '>=4'}
+ hasBin: true
+ dev: true
+
/mimic-fn@2.1.0:
resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==}
engines: {node: '>=6'}
@@ -1776,16 +3232,80 @@ packages:
engines: {node: '>=12'}
dev: true
+ /mini-css-extract-plugin@2.8.1(webpack@5.91.0):
+ resolution: {integrity: sha512-/1HDlyFRxWIZPI1ZpgqlZ8jMw/1Dp/dl3P0L1jtZ+zVcHqwPhGwaJwKL00WVgfnBy6PWCde9W65or7IIETImuA==}
+ engines: {node: '>= 12.13.0'}
+ peerDependencies:
+ webpack: ^5.0.0
+ dependencies:
+ schema-utils: 4.2.0
+ tapable: 2.2.1
+ webpack: 5.91.0
+ dev: true
+
+ /minimalistic-assert@1.0.1:
+ resolution: {integrity: sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==}
+ dev: true
+
+ /minimatch@9.0.3:
+ resolution: {integrity: sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==}
+ engines: {node: '>=16 || 14 >=14.17'}
+ dependencies:
+ brace-expansion: 2.0.1
+ dev: true
+
+ /minipass@7.0.4:
+ resolution: {integrity: sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==}
+ engines: {node: '>=16 || 14 >=14.17'}
+ dev: true
+
+ /ms@2.0.0:
+ resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==}
+ dev: true
+
/ms@2.1.2:
resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==}
dev: true
+ /ms@2.1.3:
+ resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
+ dev: true
+
+ /multicast-dns@7.2.5:
+ resolution: {integrity: sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg==}
+ hasBin: true
+ dependencies:
+ dns-packet: 5.6.1
+ thunky: 1.1.0
+ dev: true
+
/nanoid@3.3.7:
resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==}
engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
hasBin: true
dev: true
+ /negotiator@0.6.3:
+ resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==}
+ engines: {node: '>= 0.6'}
+ dev: true
+
+ /neo-async@2.6.2:
+ resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==}
+ dev: true
+
+ /no-case@3.0.4:
+ resolution: {integrity: sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==}
+ dependencies:
+ lower-case: 2.0.2
+ tslib: 2.6.2
+ dev: true
+
+ /node-forge@1.3.1:
+ resolution: {integrity: sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==}
+ engines: {node: '>= 6.13.0'}
+ dev: true
+
/node-releases@2.0.14:
resolution: {integrity: sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==}
dev: true
@@ -1800,8 +3320,15 @@ packages:
engines: {node: '>=0.10.0'}
dev: true
- /npm-run-path@5.1.0:
- resolution: {integrity: sha512-sJOdmRGrY2sjNTRMbSvluQqg+8X7ZK61yvzBEIDhz4f8z1TZFYABsqjjCBd/0PUNE9M6QDgHJXQkGUEm7Q+l9Q==}
+ /npm-run-path@4.0.1:
+ resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==}
+ engines: {node: '>=8'}
+ dependencies:
+ path-key: 3.1.1
+ dev: true
+
+ /npm-run-path@5.3.0:
+ resolution: {integrity: sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==}
engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
dependencies:
path-key: 4.0.0
@@ -1813,6 +3340,26 @@ packages:
boolbase: 1.0.0
dev: true
+ /object-inspect@1.13.1:
+ resolution: {integrity: sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==}
+ dev: true
+
+ /obuf@1.1.2:
+ resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==}
+ dev: true
+
+ /on-finished@2.4.1:
+ resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==}
+ engines: {node: '>= 0.8'}
+ dependencies:
+ ee-first: 1.1.1
+ dev: true
+
+ /on-headers@1.0.2:
+ resolution: {integrity: sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==}
+ engines: {node: '>= 0.8'}
+ dev: true
+
/onetime@5.1.2:
resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==}
engines: {node: '>=6'}
@@ -1827,21 +3374,64 @@ packages:
mimic-fn: 4.0.0
dev: true
- /ora@7.0.1:
- resolution: {integrity: sha512-0TUxTiFJWv+JnjWm4o9yvuskpEJLXTcng8MJuKd+SzAzp2o+OP3HWqNhB4OdJRt1Vsd9/mR0oyaEYlOnL7XIRw==}
- engines: {node: '>=16'}
+ /open@10.1.0:
+ resolution: {integrity: sha512-mnkeQ1qP5Ue2wd+aivTD3NHd/lZ96Lu0jgf0pwktLPtx6cTZiH7tyeGRRHs0zX0rbrahXPnXlUnbeXyaBBuIaw==}
+ engines: {node: '>=18'}
+ dependencies:
+ default-browser: 5.2.1
+ define-lazy-prop: 3.0.0
+ is-inside-container: 1.0.0
+ is-wsl: 3.1.0
+ dev: true
+
+ /ora@8.0.1:
+ resolution: {integrity: sha512-ANIvzobt1rls2BDny5fWZ3ZVKyD6nscLvfFRpQgfWsythlcsVUC9kL0zq6j2Z5z9wwp1kd7wpsD/T9qNPVLCaQ==}
+ engines: {node: '>=18'}
dependencies:
chalk: 5.3.0
cli-cursor: 4.0.0
cli-spinners: 2.9.2
is-interactive: 2.0.0
- is-unicode-supported: 1.3.0
- log-symbols: 5.1.0
- stdin-discarder: 0.1.0
- string-width: 6.1.0
+ is-unicode-supported: 2.0.0
+ log-symbols: 6.0.0
+ stdin-discarder: 0.2.2
+ string-width: 7.1.0
strip-ansi: 7.1.0
dev: true
+ /p-retry@6.2.0:
+ resolution: {integrity: sha512-JA6nkq6hKyWLLasXQXUrO4z8BUZGUt/LjlJxx8Gb2+2ntodU/SS63YZ8b0LUTbQ8ZB9iwOfhEPhg4ykKnn2KsA==}
+ engines: {node: '>=16.17'}
+ dependencies:
+ '@types/retry': 0.12.2
+ is-network-error: 1.1.0
+ retry: 0.13.1
+ dev: true
+
+ /param-case@3.0.4:
+ resolution: {integrity: sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==}
+ dependencies:
+ dot-case: 3.0.4
+ tslib: 2.6.2
+ dev: true
+
+ /parent-module@1.0.1:
+ resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==}
+ engines: {node: '>=6'}
+ dependencies:
+ callsites: 3.1.0
+ dev: true
+
+ /parse-json@5.2.0:
+ resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==}
+ engines: {node: '>=8'}
+ dependencies:
+ '@babel/code-frame': 7.24.2
+ error-ex: 1.3.2
+ json-parse-even-better-errors: 2.3.1
+ lines-and-columns: 1.2.4
+ dev: true
+
/parse5-htmlparser2-tree-adapter@7.0.0:
resolution: {integrity: sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==}
dependencies:
@@ -1855,6 +3445,18 @@ packages:
entities: 4.5.0
dev: true
+ /parseurl@1.3.3:
+ resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==}
+ engines: {node: '>= 0.8'}
+ dev: true
+
+ /pascal-case@3.1.2:
+ resolution: {integrity: sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==}
+ dependencies:
+ no-case: 3.0.4
+ tslib: 2.6.2
+ dev: true
+
/path-key@3.1.1:
resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==}
engines: {node: '>=8'}
@@ -1865,6 +3467,18 @@ packages:
engines: {node: '>=12'}
dev: true
+ /path-scurry@1.10.1:
+ resolution: {integrity: sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ==}
+ engines: {node: '>=16 || 14 >=14.17'}
+ dependencies:
+ lru-cache: 10.2.0
+ minipass: 7.0.4
+ dev: true
+
+ /path-to-regexp@0.1.7:
+ resolution: {integrity: sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==}
+ dev: true
+
/path-type@5.0.0:
resolution: {integrity: sha512-5HviZNaZcfqP95rwpv+1HDgUamezbqdSYTyzjTvwtJSnIH+3vnbmWsItli8OFEndS984VT55M3jduxZbX351gg==}
engines: {node: '>=12'}
@@ -1879,38 +3493,109 @@ packages:
engines: {node: '>=8.6'}
dev: true
- /postcss-load-config@4.0.2(postcss@8.4.32):
- resolution: {integrity: sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==}
- engines: {node: '>= 14'}
+ /postcss-csso@6.0.1(postcss@8.4.38):
+ resolution: {integrity: sha512-ZV4yEziMrx6CEiqabGLrDva0pMD7Fbw7yP+LzJvaynM4OJgTssGN6dHiMsJMJdpmNaLJltXVLsrb/5sxbFa8sA==}
+ engines: {node: ^12.20.0 || ^14.13.0 || >=15.0.0, npm: '>=7.0.0'}
+ peerDependencies:
+ postcss: ^8.0.0
+ dependencies:
+ csso: 5.0.5
+ postcss: 8.4.38
+ dev: true
+
+ /postcss-loader@8.1.1(postcss@8.4.38)(webpack@5.91.0):
+ resolution: {integrity: sha512-0IeqyAsG6tYiDRCYKQJLAmgQr47DX6N7sFSWvQxt6AcupX8DIdmykuk/o/tx0Lze3ErGHJEp5OSRxrelC6+NdQ==}
+ engines: {node: '>= 18.12.0'}
peerDependencies:
- postcss: '>=8.0.9'
- ts-node: '>=9.0.0'
+ '@rspack/core': 0.x || 1.x
+ postcss: ^7.0.0 || ^8.0.1
+ webpack: ^5.0.0
peerDependenciesMeta:
- postcss:
+ '@rspack/core':
optional: true
- ts-node:
+ webpack:
optional: true
dependencies:
- lilconfig: 3.0.0
- postcss: 8.4.32
- yaml: 2.3.4
+ cosmiconfig: 9.0.0
+ jiti: 1.21.0
+ postcss: 8.4.38
+ semver: 7.6.0
+ webpack: 5.91.0
+ transitivePeerDependencies:
+ - typescript
+ dev: true
+
+ /postcss-modules-extract-imports@3.0.0(postcss@8.4.38):
+ resolution: {integrity: sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw==}
+ engines: {node: ^10 || ^12 || >= 14}
+ peerDependencies:
+ postcss: ^8.1.0
+ dependencies:
+ postcss: 8.4.38
+ dev: true
+
+ /postcss-modules-local-by-default@4.0.4(postcss@8.4.38):
+ resolution: {integrity: sha512-L4QzMnOdVwRm1Qb8m4x8jsZzKAaPAgrUF1r/hjDR2Xj7R+8Zsf97jAlSQzWtKx5YNiNGN8QxmPFIc/sh+RQl+Q==}
+ engines: {node: ^10 || ^12 || >= 14}
+ peerDependencies:
+ postcss: ^8.1.0
+ dependencies:
+ icss-utils: 5.1.0(postcss@8.4.38)
+ postcss: 8.4.38
+ postcss-selector-parser: 6.0.16
+ postcss-value-parser: 4.2.0
+ dev: true
+
+ /postcss-modules-scope@3.1.1(postcss@8.4.38):
+ resolution: {integrity: sha512-uZgqzdTleelWjzJY+Fhti6F3C9iF1JR/dODLs/JDefozYcKTBCdD8BIl6nNPbTbcLnGrk56hzwZC2DaGNvYjzA==}
+ engines: {node: ^10 || ^12 || >= 14}
+ peerDependencies:
+ postcss: ^8.1.0
+ dependencies:
+ postcss: 8.4.38
+ postcss-selector-parser: 6.0.16
+ dev: true
+
+ /postcss-modules-values@4.0.0(postcss@8.4.38):
+ resolution: {integrity: sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ==}
+ engines: {node: ^10 || ^12 || >= 14}
+ peerDependencies:
+ postcss: ^8.1.0
+ dependencies:
+ icss-utils: 5.1.0(postcss@8.4.38)
+ postcss: 8.4.38
+ dev: true
+
+ /postcss-selector-parser@6.0.16:
+ resolution: {integrity: sha512-A0RVJrX+IUkVZbW3ClroRWurercFhieevHB38sr2+l9eUClMqome3LmEmnhlNy+5Mr2EYN6B2Kaw9wYdd+VHiw==}
+ engines: {node: '>=4'}
+ dependencies:
+ cssesc: 3.0.0
+ util-deprecate: 1.0.2
dev: true
/postcss-value-parser@4.2.0:
resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==}
dev: true
- /postcss@8.4.32:
- resolution: {integrity: sha512-D/kj5JNu6oo2EIy+XL/26JEDTlIbB8hw85G8StOE6L74RQAVVP5rej6wxCNqyMbR4RkPfqvezVbPw81Ngd6Kcw==}
+ /postcss@8.4.38:
+ resolution: {integrity: sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==}
engines: {node: ^10 || ^12 || >=14}
dependencies:
nanoid: 3.3.7
picocolors: 1.0.0
- source-map-js: 1.0.2
+ source-map-js: 1.2.0
+ dev: true
+
+ /preact@10.20.1:
+ resolution: {integrity: sha512-JIFjgFg9B2qnOoGiYMVBtrcFxHqn+dNXbq76bVmcaHYJFYR4lW67AOcXgAYQQTDYXDOg/kTZrKPNCdRgJ2UJmw==}
dev: true
- /preact@10.19.3:
- resolution: {integrity: sha512-nHHTeFVBTHRGxJXKkKu5hT8C/YWBkPso4/Gad6xuj5dbptt9iF9NZr9pHbPhBrnT2klheu7mHTxTZ/LjwJiEiQ==}
+ /pretty-error@4.0.0:
+ resolution: {integrity: sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw==}
+ dependencies:
+ lodash: 4.17.21
+ renderkid: 3.0.0
dev: true
/prismjs@1.29.0:
@@ -1918,10 +3603,72 @@ packages:
engines: {node: '>=6'}
dev: true
+ /process-nextick-args@2.0.1:
+ resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==}
+ dev: true
+
+ /proxy-addr@2.0.7:
+ resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==}
+ engines: {node: '>= 0.10'}
+ dependencies:
+ forwarded: 0.2.0
+ ipaddr.js: 1.9.1
+ dev: true
+
+ /punycode.js@2.3.1:
+ resolution: {integrity: sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==}
+ engines: {node: '>=6'}
+ dev: true
+
+ /punycode@2.3.1:
+ resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==}
+ engines: {node: '>=6'}
+ dev: true
+
+ /qs@6.11.0:
+ resolution: {integrity: sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==}
+ engines: {node: '>=0.6'}
+ dependencies:
+ side-channel: 1.0.6
+ dev: true
+
/queue-microtask@1.2.3:
resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==}
dev: true
+ /randombytes@2.1.0:
+ resolution: {integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==}
+ dependencies:
+ safe-buffer: 5.2.1
+ dev: true
+
+ /range-parser@1.2.1:
+ resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==}
+ engines: {node: '>= 0.6'}
+ dev: true
+
+ /raw-body@2.5.2:
+ resolution: {integrity: sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==}
+ engines: {node: '>= 0.8'}
+ dependencies:
+ bytes: 3.1.2
+ http-errors: 2.0.0
+ iconv-lite: 0.4.24
+ unpipe: 1.0.0
+ dev: true
+
+ /readable-stream@2.3.8:
+ resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==}
+ dependencies:
+ core-util-is: 1.0.3
+ inherits: 2.0.4
+ isarray: 1.0.0
+ process-nextick-args: 2.0.1
+ safe-buffer: 5.1.2
+ string_decoder: 1.1.1
+ util-deprecate: 1.0.2
+ dev: true
+
/readable-stream@3.6.2:
resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==}
engines: {node: '>= 6'}
@@ -1938,6 +3685,39 @@ packages:
picomatch: 2.3.1
dev: true
+ /relateurl@0.2.7:
+ resolution: {integrity: sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog==}
+ engines: {node: '>= 0.10'}
+ dev: true
+
+ /renderkid@3.0.0:
+ resolution: {integrity: sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg==}
+ dependencies:
+ css-select: 4.3.0
+ dom-converter: 0.2.0
+ htmlparser2: 6.1.0
+ lodash: 4.17.21
+ strip-ansi: 6.0.1
+ dev: true
+
+ /require-from-string@2.0.2:
+ resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /requires-port@1.0.0:
+ resolution: {integrity: sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==}
+ dev: true
+
+ /resolve-from@4.0.0:
+ resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==}
+ engines: {node: '>=4'}
+ dev: true
+
+ /resolve-pkg-maps@1.0.0:
+ resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==}
+ dev: true
+
/restore-cursor@4.0.0:
resolution: {integrity: sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==}
engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
@@ -1946,30 +3726,27 @@ packages:
signal-exit: 3.0.7
dev: true
+ /retry@0.13.1:
+ resolution: {integrity: sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==}
+ engines: {node: '>= 4'}
+ dev: true
+
/reusify@1.0.4:
resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==}
engines: {iojs: '>=1.0.0', node: '>=0.10.0'}
dev: true
- /rollup@4.8.0:
- resolution: {integrity: sha512-NpsklK2fach5CdI+PScmlE5R4Ao/FSWtF7LkoIrHDxPACY/xshNasPsbpG0VVHxUTbf74tJbVT4PrP8JsJ6ZDA==}
- engines: {node: '>=18.0.0', npm: '>=8.0.0'}
+ /rimraf@5.0.5:
+ resolution: {integrity: sha512-CqDakW+hMe/Bz202FPEymy68P+G50RfMQK+Qo5YUqc9SPipvbGjCGKd0RSKEelbsfQuw3g5NZDSrlZZAJurH1A==}
+ engines: {node: '>=14'}
hasBin: true
- optionalDependencies:
- '@rollup/rollup-android-arm-eabi': 4.8.0
- '@rollup/rollup-android-arm64': 4.8.0
- '@rollup/rollup-darwin-arm64': 4.8.0
- '@rollup/rollup-darwin-x64': 4.8.0
- '@rollup/rollup-linux-arm-gnueabihf': 4.8.0
- '@rollup/rollup-linux-arm64-gnu': 4.8.0
- '@rollup/rollup-linux-arm64-musl': 4.8.0
- '@rollup/rollup-linux-riscv64-gnu': 4.8.0
- '@rollup/rollup-linux-x64-gnu': 4.8.0
- '@rollup/rollup-linux-x64-musl': 4.8.0
- '@rollup/rollup-win32-arm64-msvc': 4.8.0
- '@rollup/rollup-win32-ia32-msvc': 4.8.0
- '@rollup/rollup-win32-x64-msvc': 4.8.0
- fsevents: 2.3.3
+ dependencies:
+ glob: 10.3.10
+ dev: true
+
+ /run-applescript@7.0.0:
+ resolution: {integrity: sha512-9by4Ij99JUr/MCFBUkDKLWK3G9HVXmabKz9U5MlIAIuvuzkiOicRYs8XJLxX+xahD+mLiiCYDqF9dKAgtzKP1A==}
+ engines: {node: '>=18'}
dev: true
/run-parallel@1.2.0:
@@ -1978,24 +3755,76 @@ packages:
queue-microtask: 1.2.3
dev: true
+ /safe-buffer@5.1.2:
+ resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==}
+ dev: true
+
/safe-buffer@5.2.1:
resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==}
dev: true
- /sass@1.69.5:
- resolution: {integrity: sha512-qg2+UCJibLr2LCVOt3OlPhr/dqVHWOa9XtZf2OjbLs/T4VPSJ00udtgJxH3neXZm+QqX8B+3cU7RaLqp1iVfcQ==}
+ /safer-buffer@2.1.2:
+ resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
+ dev: true
+
+ /sass-loader@14.1.1(webpack@5.91.0):
+ resolution: {integrity: sha512-QX8AasDg75monlybel38BZ49JP5Z+uSKfKwF2rO7S74BywaRmGQMUBw9dtkS+ekyM/QnP+NOrRYq8ABMZ9G8jw==}
+ engines: {node: '>= 18.12.0'}
+ peerDependencies:
+ '@rspack/core': 0.x || 1.x
+ node-sass: ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0 || ^9.0.0
+ sass: ^1.3.0
+ sass-embedded: '*'
+ webpack: ^5.0.0
+ peerDependenciesMeta:
+ '@rspack/core':
+ optional: true
+ node-sass:
+ optional: true
+ sass:
+ optional: true
+ sass-embedded:
+ optional: true
+ webpack:
+ optional: true
+ dependencies:
+ neo-async: 2.6.2
+ webpack: 5.91.0
+ dev: true
+
+ /sass@1.72.0:
+ resolution: {integrity: sha512-Gpczt3WA56Ly0Mn8Sl21Vj94s1axi9hDIzDFn9Ph9x3C3p4nNyvsqJoQyVXKou6cBlfFWEgRW4rT8Tb4i3XnVA==}
engines: {node: '>=14.0.0'}
hasBin: true
dependencies:
- chokidar: 3.5.3
- immutable: 4.3.4
- source-map-js: 1.0.2
+ chokidar: 3.6.0
+ immutable: 4.3.5
+ source-map-js: 1.2.0
dev: true
/sax@1.3.0:
resolution: {integrity: sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==}
dev: true
+ /schema-utils@3.3.0:
+ resolution: {integrity: sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==}
+ engines: {node: '>= 10.13.0'}
+ dependencies:
+ '@types/json-schema': 7.0.15
+ ajv: 6.12.6
+ ajv-keywords: 3.5.2(ajv@6.12.6)
+ dev: true
+
+ /schema-utils@4.2.0:
+ resolution: {integrity: sha512-L0jRsrPpjdckP3oPug3/VxNKt2trR8TcabrM6FOAAlvC/9Phcmm+cuAgTlxBqdBR1WJx7Naj9WHw+aOmheSVbw==}
+ engines: {node: '>= 12.13.0'}
+ dependencies:
+ '@types/json-schema': 7.0.15
+ ajv: 8.12.0
+ ajv-formats: 2.1.1(ajv@8.12.0)
+ ajv-keywords: 5.1.0(ajv@8.12.0)
+ dev: true
+
/search-insights@2.13.0:
resolution: {integrity: sha512-Orrsjf9trHHxFRuo9/rzm0KIWmgzE8RMlZMzuhZOJ01Rnz3D0YBAe+V6473t6/H6c7irs6Lt48brULAiRWb3Vw==}
dev: true
@@ -2008,14 +3837,107 @@ packages:
kind-of: 6.0.3
dev: true
- /semver@7.5.4:
- resolution: {integrity: sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==}
+ /select-hose@2.0.0:
+ resolution: {integrity: sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg==}
+ dev: true
+
+ /selfsigned@2.4.1:
+ resolution: {integrity: sha512-th5B4L2U+eGLq1TVh7zNRGBapioSORUeymIydxgFpwww9d2qyKvtuPU2jJuHvYAwwqi2Y596QBL3eEqcPEYL8Q==}
+ engines: {node: '>=10'}
+ dependencies:
+ '@types/node-forge': 1.3.11
+ node-forge: 1.3.1
+ dev: true
+
+ /semver@7.6.0:
+ resolution: {integrity: sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==}
engines: {node: '>=10'}
hasBin: true
dependencies:
lru-cache: 6.0.0
dev: true
+ /send@0.18.0:
+ resolution: {integrity: sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==}
+ engines: {node: '>= 0.8.0'}
+ dependencies:
+ debug: 2.6.9
+ depd: 2.0.0
+ destroy: 1.2.0
+ encodeurl: 1.0.2
+ escape-html: 1.0.3
+ etag: 1.8.1
+ fresh: 0.5.2
+ http-errors: 2.0.0
+ mime: 1.6.0
+ ms: 2.1.3
+ on-finished: 2.4.1
+ range-parser: 1.2.1
+ statuses: 2.0.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /serialize-javascript@6.0.2:
+ resolution: {integrity: sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==}
+ dependencies:
+ randombytes: 2.1.0
+ dev: true
+
+ /serve-index@1.9.1:
+ resolution: {integrity: sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw==}
+ engines: {node: '>= 0.8.0'}
+ dependencies:
+ accepts: 1.3.8
+ batch: 0.6.1
+ debug: 2.6.9
+ escape-html: 1.0.3
+ http-errors: 1.6.3
+ mime-types: 2.1.35
+ parseurl: 1.3.3
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /serve-static@1.15.0:
+ resolution: {integrity: sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==}
+ engines: {node: '>= 0.8.0'}
+ dependencies:
+ encodeurl: 1.0.2
+ escape-html: 1.0.3
+ parseurl: 1.3.3
+ send: 0.18.0
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /set-function-length@1.2.2:
+ resolution: {integrity: sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ define-data-property: 1.1.4
+ es-errors: 1.3.0
+ function-bind: 1.1.2
+ get-intrinsic: 1.2.4
+ gopd: 1.0.1
+ has-property-descriptors: 1.0.2
+ dev: true
+
+ /setprototypeof@1.1.0:
+ resolution: {integrity: sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==}
+ dev: true
+
+ /setprototypeof@1.2.0:
+ resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==}
+ dev: true
+
+ /shallow-clone@3.0.1:
+ resolution: {integrity: sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==}
+ engines: {node: '>=8'}
+ dependencies:
+ kind-of: 6.0.3
+ dev: true
+
/shebang-command@2.0.0:
resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==}
engines: {node: '>=8'}
@@ -2028,6 +3950,20 @@ packages:
engines: {node: '>=8'}
dev: true
+ /shell-quote@1.8.1:
+ resolution: {integrity: sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA==}
+ dev: true
+
+ /side-channel@1.0.6:
+ resolution: {integrity: sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ call-bind: 1.0.7
+ es-errors: 1.3.0
+ get-intrinsic: 1.2.4
+ object-inspect: 1.13.1
+ dev: true
+
/signal-exit@3.0.7:
resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==}
dev: true
@@ -2053,37 +3989,126 @@ packages:
engines: {node: '>=14.16'}
dev: true
- /source-map-js@1.0.2:
- resolution: {integrity: sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==}
+ /sockjs@0.3.24:
+ resolution: {integrity: sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ==}
+ dependencies:
+ faye-websocket: 0.11.4
+ uuid: 8.3.2
+ websocket-driver: 0.7.4
+ dev: true
+
+ /source-list-map@2.0.1:
+ resolution: {integrity: sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==}
+ dev: true
+
+ /source-map-js@1.2.0:
+ resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /source-map-support@0.5.21:
+ resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==}
+ dependencies:
+ buffer-from: 1.1.2
+ source-map: 0.6.1
+ dev: true
+
+ /source-map@0.6.1:
+ resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
engines: {node: '>=0.10.0'}
dev: true
+ /spdy-transport@3.0.0:
+ resolution: {integrity: sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==}
+ dependencies:
+ debug: 4.3.4
+ detect-node: 2.1.0
+ hpack.js: 2.1.6
+ obuf: 1.1.2
+ readable-stream: 3.6.2
+ wbuf: 1.7.3
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /spdy@4.0.2:
+ resolution: {integrity: sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==}
+ engines: {node: '>=6.0.0'}
+ dependencies:
+ debug: 4.3.4
+ handle-thing: 2.0.1
+ http-deceiver: 1.2.7
+ select-hose: 2.0.0
+ spdy-transport: 3.0.0
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
/sprintf-js@1.0.3:
resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==}
dev: true
- /stdin-discarder@0.1.0:
- resolution: {integrity: sha512-xhV7w8S+bUwlPTb4bAOUQhv8/cSS5offJuX8GQGq32ONF0ZtDWKfkdomM3HMRA+LhX6um/FZ0COqlwsjD53LeQ==}
- engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+ /statuses@1.5.0:
+ resolution: {integrity: sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==}
+ engines: {node: '>= 0.6'}
+ dev: true
+
+ /statuses@2.0.1:
+ resolution: {integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==}
+ engines: {node: '>= 0.8'}
+ dev: true
+
+ /stdin-discarder@0.2.2:
+ resolution: {integrity: sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ==}
+ engines: {node: '>=18'}
+ dev: true
+
+ /string-width@4.2.3:
+ resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==}
+ engines: {node: '>=8'}
dependencies:
- bl: 5.1.0
+ emoji-regex: 8.0.0
+ is-fullwidth-code-point: 3.0.0
+ strip-ansi: 6.0.1
dev: true
- /string-width@6.1.0:
- resolution: {integrity: sha512-k01swCJAgQmuADB0YIc+7TuatfNvTBVOoaUWJjTB9R4VJzR5vNWzf5t42ESVZFPS8xTySF7CAdV4t/aaIm3UnQ==}
- engines: {node: '>=16'}
+ /string-width@5.1.2:
+ resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==}
+ engines: {node: '>=12'}
dependencies:
eastasianwidth: 0.2.0
+ emoji-regex: 9.2.2
+ strip-ansi: 7.1.0
+ dev: true
+
+ /string-width@7.1.0:
+ resolution: {integrity: sha512-SEIJCWiX7Kg4c129n48aDRwLbFb2LJmXXFrWBG4NGaRtMQ3myKPKbwrD1BKqQn74oCoNMBVrfDEr5M9YxCsrkw==}
+ engines: {node: '>=18'}
+ dependencies:
emoji-regex: 10.3.0
+ get-east-asian-width: 1.2.0
strip-ansi: 7.1.0
dev: true
+ /string_decoder@1.1.1:
+ resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==}
+ dependencies:
+ safe-buffer: 5.1.2
+ dev: true
+
/string_decoder@1.3.0:
resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==}
dependencies:
safe-buffer: 5.2.1
dev: true
+ /strip-ansi@6.0.1:
+ resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
+ engines: {node: '>=8'}
+ dependencies:
+ ansi-regex: 5.0.1
+ dev: true
+
/strip-ansi@7.1.0:
resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==}
engines: {node: '>=12'}
@@ -2096,13 +4121,88 @@ packages:
engines: {node: '>=0.10.0'}
dev: true
+ /strip-final-newline@2.0.0:
+ resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==}
+ engines: {node: '>=6'}
+ dev: true
+
/strip-final-newline@3.0.0:
resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==}
engines: {node: '>=12'}
dev: true
- /striptags@3.2.0:
- resolution: {integrity: sha512-g45ZOGzHDMe2bdYMdIvdAfCQkCTDMGBazSw1ypMowwGIee7ZQ5dU0rBJ8Jqgl+jAKIv4dbeE1jscZq9wid1Tkw==}
+ /style-loader@3.3.4(webpack@5.91.0):
+ resolution: {integrity: sha512-0WqXzrsMTyb8yjZJHDqwmnwRJvhALK9LfRtRc6B4UTWe8AijYLZYZ9thuJTZc2VfQWINADW/j+LiJnfy2RoC1w==}
+ engines: {node: '>= 12.13.0'}
+ peerDependencies:
+ webpack: ^5.0.0
+ dependencies:
+ webpack: 5.91.0
+ dev: true
+
+ /supports-color@5.5.0:
+ resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==}
+ engines: {node: '>=4'}
+ dependencies:
+ has-flag: 3.0.0
+ dev: true
+
+ /supports-color@7.2.0:
+ resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==}
+ engines: {node: '>=8'}
+ dependencies:
+ has-flag: 4.0.0
+ dev: true
+
+ /supports-color@8.1.1:
+ resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==}
+ engines: {node: '>=10'}
+ dependencies:
+ has-flag: 4.0.0
+ dev: true
+
+ /tapable@2.2.1:
+ resolution: {integrity: sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==}
+ engines: {node: '>=6'}
+ dev: true
+
+ /terser-webpack-plugin@5.3.10(webpack@5.91.0):
+ resolution: {integrity: sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w==}
+ engines: {node: '>= 10.13.0'}
+ peerDependencies:
+ '@swc/core': '*'
+ esbuild: '*'
+ uglify-js: '*'
+ webpack: ^5.1.0
+ peerDependenciesMeta:
+ '@swc/core':
+ optional: true
+ esbuild:
+ optional: true
+ uglify-js:
+ optional: true
+ dependencies:
+ '@jridgewell/trace-mapping': 0.3.25
+ jest-worker: 27.5.1
+ schema-utils: 3.3.0
+ serialize-javascript: 6.0.2
+ terser: 5.29.2
+ webpack: 5.91.0
+ dev: true
+
+ /terser@5.29.2:
+ resolution: {integrity: sha512-ZiGkhUBIM+7LwkNjXYJq8svgkd+QK3UUr0wJqY4MieaezBSAIPgbSPZyIx0idM6XWK5CMzSWa8MJIzmRcB8Caw==}
+ engines: {node: '>=10'}
+ hasBin: true
+ dependencies:
+ '@jridgewell/source-map': 0.3.6
+ acorn: 8.11.3
+ commander: 2.20.3
+ source-map-support: 0.5.21
+ dev: true
+
+ /thunky@1.1.0:
+ resolution: {integrity: sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==}
dev: true
/to-fast-properties@2.0.0:
@@ -2117,12 +4217,29 @@ packages:
is-number: 7.0.0
dev: true
+ /toidentifier@1.0.1:
+ resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==}
+ engines: {node: '>=0.6'}
+ dev: true
+
/ts-debounce@4.0.0:
resolution: {integrity: sha512-+1iDGY6NmOGidq7i7xZGA4cm8DAa6fqdYcvO5Z6yBevH++Bdo9Qt/mN0TzHUgcCcKv1gmh9+W5dHqz8pMWbCbg==}
dev: true
- /uc.micro@1.0.6:
- resolution: {integrity: sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==}
+ /tslib@2.6.2:
+ resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==}
+ dev: true
+
+ /type-is@1.6.18:
+ resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==}
+ engines: {node: '>= 0.6'}
+ dependencies:
+ media-typer: 0.3.0
+ mime-types: 2.1.35
+ dev: true
+
+ /uc.micro@2.1.0:
+ resolution: {integrity: sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==}
dev: true
/undici-types@5.26.5:
@@ -2139,63 +4256,58 @@ packages:
engines: {node: '>= 10.0.0'}
dev: true
+ /unpipe@1.0.0:
+ resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==}
+ engines: {node: '>= 0.8'}
+ dev: true
+
/upath@2.0.1:
resolution: {integrity: sha512-1uEe95xksV1O0CYKXo8vQvN1JEbtJp7lb7C5U9HMsIp6IVwntkH/oNUzyVNQSd4S1sYk2FpSSW44FqMc8qee5w==}
engines: {node: '>=4'}
dev: true
- /update-browserslist-db@1.0.13(browserslist@4.22.2):
+ /update-browserslist-db@1.0.13(browserslist@4.23.0):
resolution: {integrity: sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==}
hasBin: true
peerDependencies:
browserslist: '>= 4.21.0'
dependencies:
- browserslist: 4.22.2
- escalade: 3.1.1
+ browserslist: 4.23.0
+ escalade: 3.1.2
picocolors: 1.0.0
dev: true
+ /uri-js@4.4.1:
+ resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==}
+ dependencies:
+ punycode: 2.3.1
+ dev: true
+
/util-deprecate@1.0.2:
resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
dev: true
- /vite@5.0.12:
- resolution: {integrity: sha512-4hsnEkG3q0N4Tzf1+t6NdN9dg/L3BM+q8SWgbSPnJvrgH2kgdyzfVJwbR1ic69/4uMJJ/3dqDZZE5/WwqW8U1w==}
- engines: {node: ^18.0.0 || >=20.0.0}
+ /utila@0.4.0:
+ resolution: {integrity: sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA==}
+ dev: true
+
+ /utils-merge@1.0.1:
+ resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==}
+ engines: {node: '>= 0.4.0'}
+ dev: true
+
+ /uuid@8.3.2:
+ resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==}
hasBin: true
- peerDependencies:
- '@types/node': ^18.0.0 || >=20.0.0
- less: '*'
- lightningcss: ^1.21.0
- sass: '*'
- stylus: '*'
- sugarss: '*'
- terser: ^5.4.0
- peerDependenciesMeta:
- '@types/node':
- optional: true
- less:
- optional: true
- lightningcss:
- optional: true
- sass:
- optional: true
- stylus:
- optional: true
- sugarss:
- optional: true
- terser:
- optional: true
- dependencies:
- esbuild: 0.19.9
- postcss: 8.4.32
- rollup: 4.8.0
- optionalDependencies:
- fsevents: 2.3.3
dev: true
- /vue-demi@0.14.6(vue@3.3.11):
- resolution: {integrity: sha512-8QA7wrYSHKaYgUxDA5ZC24w+eHm3sYCbp0EzcDwKqN3p6HqtTCGR/GVsPyZW92unff4UlcSh++lmqDWN3ZIq4w==}
+ /vary@1.1.2:
+ resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==}
+ engines: {node: '>= 0.8'}
+ dev: true
+
+ /vue-demi@0.14.7(vue@3.4.21):
+ resolution: {integrity: sha512-EOG8KXDQNwkJILkx/gPcoL/7vH+hORoBaKgGe+6W7VFMvCYJfmF2dGbvgDroVnI8LU7/kTu8mbjRZGBU1z9NTA==}
engines: {node: '>=12'}
hasBin: true
requiresBuild: true
@@ -2206,145 +4318,244 @@ packages:
'@vue/composition-api':
optional: true
dependencies:
- vue: 3.3.11
+ vue: 3.4.21
+ dev: true
+
+ /vue-loader@17.4.2(vue@3.4.21)(webpack@5.91.0):
+ resolution: {integrity: sha512-yTKOA4R/VN4jqjw4y5HrynFL8AK0Z3/Jt7eOJXEitsm0GMRHDBjCfCiuTiLP7OESvsZYo2pATCWhDqxC5ZrM6w==}
+ peerDependencies:
+ '@vue/compiler-sfc': '*'
+ vue: '*'
+ webpack: ^4.1.0 || ^5.0.0-0
+ peerDependenciesMeta:
+ '@vue/compiler-sfc':
+ optional: true
+ vue:
+ optional: true
+ dependencies:
+ chalk: 4.1.2
+ hash-sum: 2.0.0
+ vue: 3.4.21
+ watchpack: 2.4.1
+ webpack: 5.91.0
dev: true
- /vue-router@4.2.5(vue@3.3.11):
- resolution: {integrity: sha512-DIUpKcyg4+PTQKfFPX88UWhlagBEBEfJ5A8XDXRJLUnZOvcpMF8o/dnL90vpVkGaPbjvXazV/rC1qBKrZlFugw==}
+ /vue-router@4.3.0(vue@3.4.21):
+ resolution: {integrity: sha512-dqUcs8tUeG+ssgWhcPbjHvazML16Oga5w34uCUmsk7i0BcnskoLGwjpa15fqMr2Fa5JgVBrdL2MEgqz6XZ/6IQ==}
peerDependencies:
vue: ^3.2.0
dependencies:
- '@vue/devtools-api': 6.5.1
- vue: 3.3.11
+ '@vue/devtools-api': 6.6.1
+ vue: 3.4.21
dev: true
- /vue@3.3.11:
- resolution: {integrity: sha512-d4oBctG92CRO1cQfVBZp6WJAs0n8AK4Xf5fNjQCBeKCvMI1efGQ5E3Alt1slFJS9fZuPcFoiAiqFvQlv1X7t/w==}
+ /vue@3.4.21:
+ resolution: {integrity: sha512-5hjyV/jLEIKD/jYl4cavMcnzKwjMKohureP8ejn3hhEjwhWIhWeuzL2kJAjzl/WyVsgPY56Sy4Z40C3lVshxXA==}
peerDependencies:
typescript: '*'
peerDependenciesMeta:
typescript:
optional: true
dependencies:
- '@vue/compiler-dom': 3.3.11
- '@vue/compiler-sfc': 3.3.11
- '@vue/runtime-dom': 3.3.11
- '@vue/server-renderer': 3.3.11(vue@3.3.11)
- '@vue/shared': 3.3.11
+ '@vue/compiler-dom': 3.4.21
+ '@vue/compiler-sfc': 3.4.21
+ '@vue/runtime-dom': 3.4.21
+ '@vue/server-renderer': 3.4.21(vue@3.4.21)
+ '@vue/shared': 3.4.21
dev: true
- /vuepress-plugin-sitemap2@2.0.0-rc.4(vuepress@2.0.0-rc.0):
- resolution: {integrity: sha512-zi57grbyAFL54HUZNmmAWELYgwPsqa8p63HkEBSpXiQEa3JbYumAXHPZp4sIBGlBxcF8X34GtddrVw9FDlCtZA==}
- engines: {node: '>=18.16.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'}
- deprecated: Please use @vuepress/plugin-sitemap@v2 instead
+ /vuepress@2.0.0-rc.9(@vuepress/bundler-webpack@2.0.0-rc.9)(vue@3.4.21):
+ resolution: {integrity: sha512-jT1ln2lawdph+vVI6n2JfEUhQIcyc1RQWDdQu9DffhJGywJunFcumnUJudpqd1SNIES2Fz1hVCD6gdrE/rVKOQ==}
+ engines: {node: '>=18.16.0'}
+ hasBin: true
peerDependencies:
- vuepress: 2.0.0-rc.0
- vuepress-vite: 2.0.0-rc.0
- vuepress-webpack: 2.0.0-rc.0
+ '@vuepress/bundler-vite': 2.0.0-rc.9
+ '@vuepress/bundler-webpack': 2.0.0-rc.9
+ vue: ^3.4.0
peerDependenciesMeta:
- vuepress:
+ '@vuepress/bundler-vite':
optional: true
- vuepress-vite:
- optional: true
- vuepress-webpack:
+ '@vuepress/bundler-webpack':
optional: true
dependencies:
- '@vuepress/shared': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
- sitemap: 7.1.1
- vuepress: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11)
- vuepress-shared: 2.0.0-rc.4(vuepress@2.0.0-rc.0)
+ '@vuepress/bundler-webpack': 2.0.0-rc.9
+ '@vuepress/cli': 2.0.0-rc.9
+ '@vuepress/client': 2.0.0-rc.9
+ '@vuepress/core': 2.0.0-rc.9
+ '@vuepress/markdown': 2.0.0-rc.9
+ '@vuepress/shared': 2.0.0-rc.9
+ '@vuepress/utils': 2.0.0-rc.9
+ vue: 3.4.21
transitivePeerDependencies:
- - '@vue/composition-api'
- supports-color
- typescript
dev: true
- /vuepress-shared@2.0.0-rc.4(vuepress@2.0.0-rc.0):
- resolution: {integrity: sha512-YndYftQ9AUdWWESZHFZ7QjuUGXqgVayHzu3Qfar9GWr45NP2ZW7edKN4adU2/bOiokYG1Rfj47dgMUrRxEgqhg==}
- engines: {node: '>=18.16.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'}
+ /watchpack@2.4.1:
+ resolution: {integrity: sha512-8wrBCMtVhqcXP2Sup1ctSkga6uc2Bx0IIvKyT7yTFier5AXHooSI+QyQQAtTb7+E0IUCCKyTFmXqdqgum2XWGg==}
+ engines: {node: '>=10.13.0'}
+ dependencies:
+ glob-to-regexp: 0.4.1
+ graceful-fs: 4.2.11
+ dev: true
+
+ /wbuf@1.7.3:
+ resolution: {integrity: sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==}
+ dependencies:
+ minimalistic-assert: 1.0.1
+ dev: true
+
+ /webpack-chain@6.5.1:
+ resolution: {integrity: sha512-7doO/SRtLu8q5WM0s7vPKPWX580qhi0/yBHkOxNkv50f6qB76Zy9o2wRTrrPULqYTvQlVHuvbA8v+G5ayuUDsA==}
+ engines: {node: '>=8'}
+ deprecated: Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.
+ dependencies:
+ deepmerge: 1.5.2
+ javascript-stringify: 2.1.0
+ dev: true
+
+ /webpack-dev-middleware@7.1.1(webpack@5.91.0):
+ resolution: {integrity: sha512-NmRVq4AvRQs66dFWyDR4GsFDJggtSi2Yn38MXLk0nffgF9n/AIP4TFBg2TQKYaRAN4sHuKOTiz9BnNCENDLEVA==}
+ engines: {node: '>= 18.12.0'}
peerDependencies:
- vuepress: 2.0.0-rc.0
- vuepress-vite: 2.0.0-rc.0
- vuepress-webpack: 2.0.0-rc.0
+ webpack: ^5.0.0
peerDependenciesMeta:
- vuepress:
+ webpack:
optional: true
- vuepress-vite:
+ dependencies:
+ colorette: 2.0.20
+ memfs: 4.8.0
+ mime-types: 2.1.35
+ on-finished: 2.4.1
+ range-parser: 1.2.1
+ schema-utils: 4.2.0
+ webpack: 5.91.0
+ dev: true
+
+ /webpack-dev-server@5.0.4(webpack@5.91.0):
+ resolution: {integrity: sha512-dljXhUgx3HqKP2d8J/fUMvhxGhzjeNVarDLcbO/EWMSgRizDkxHQDZQaLFL5VJY9tRBj2Gz+rvCEYYvhbqPHNA==}
+ engines: {node: '>= 18.12.0'}
+ hasBin: true
+ peerDependencies:
+ webpack: ^5.0.0
+ webpack-cli: '*'
+ peerDependenciesMeta:
+ webpack:
optional: true
- vuepress-webpack:
+ webpack-cli:
optional: true
dependencies:
- '@vuepress/client': 2.0.0-rc.0
- '@vuepress/shared': 2.0.0-rc.0
- '@vuepress/utils': 2.0.0-rc.0
- '@vueuse/core': 10.7.0(vue@3.3.11)
- cheerio: 1.0.0-rc.12
- dayjs: 1.11.10
- execa: 8.0.1
- fflate: 0.8.1
- gray-matter: 4.0.3
- semver: 7.5.4
- striptags: 3.2.0
- vue: 3.3.11
- vue-router: 4.2.5(vue@3.3.11)
- vuepress: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11)
+ '@types/bonjour': 3.5.13
+ '@types/connect-history-api-fallback': 1.5.4
+ '@types/express': 4.17.21
+ '@types/serve-index': 1.9.4
+ '@types/serve-static': 1.15.5
+ '@types/sockjs': 0.3.36
+ '@types/ws': 8.5.10
+ ansi-html-community: 0.0.8
+ bonjour-service: 1.2.1
+ chokidar: 3.6.0
+ colorette: 2.0.20
+ compression: 1.7.4
+ connect-history-api-fallback: 2.0.0
+ default-gateway: 6.0.3
+ express: 4.19.2
+ graceful-fs: 4.2.11
+ html-entities: 2.5.2
+ http-proxy-middleware: 2.0.6(@types/express@4.17.21)
+ ipaddr.js: 2.1.0
+ launch-editor: 2.6.1
+ open: 10.1.0
+ p-retry: 6.2.0
+ rimraf: 5.0.5
+ schema-utils: 4.2.0
+ selfsigned: 2.4.1
+ serve-index: 1.9.1
+ sockjs: 0.3.24
+ spdy: 4.0.2
+ webpack: 5.91.0
+ webpack-dev-middleware: 7.1.1(webpack@5.91.0)
+ ws: 8.16.0
transitivePeerDependencies:
- - '@vue/composition-api'
+ - bufferutil
+ - debug
- supports-color
- - typescript
+ - utf-8-validate
dev: true
- /vuepress-vite@2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11):
- resolution: {integrity: sha512-+2XBejeiskPyr2raBeA2o4uDFDsjtadpUVmtio3qqFtQpOhidz/ORuiTLr2UfLtFn1ASIHP6Vy2YjQ0e/TeUVw==}
- engines: {node: '>=18.16.0'}
+ /webpack-merge@5.10.0:
+ resolution: {integrity: sha512-+4zXKdx7UnO+1jaN4l2lHVD+mFvnlZQP/6ljaJVb4SZiwIKeUnrT5l0gkT8z+n4hKpC+jpOv6O9R+gLtag7pSA==}
+ engines: {node: '>=10.0.0'}
+ dependencies:
+ clone-deep: 4.0.1
+ flat: 5.0.2
+ wildcard: 2.0.1
+ dev: true
+
+ /webpack-sources@1.4.3:
+ resolution: {integrity: sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ==}
+ dependencies:
+ source-list-map: 2.0.1
+ source-map: 0.6.1
+ dev: true
+
+ /webpack-sources@3.2.3:
+ resolution: {integrity: sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==}
+ engines: {node: '>=10.13.0'}
+ dev: true
+
+ /webpack@5.91.0:
+ resolution: {integrity: sha512-rzVwlLeBWHJbmgTC/8TvAcu5vpJNII+MelQpylD4jNERPwpBJOE2lEcko1zJX3QJeLjTTAnQxn/OJ8bjDzVQaw==}
+ engines: {node: '>=10.13.0'}
hasBin: true
peerDependencies:
- '@vuepress/client': 2.0.0-rc.0
- vue: ^3.3.4
- dependencies:
- '@vuepress/bundler-vite': 2.0.0-rc.0
- '@vuepress/cli': 2.0.0-rc.0
- '@vuepress/client': 2.0.0-rc.0
- '@vuepress/core': 2.0.0-rc.0
- '@vuepress/theme-default': 2.0.0-rc.0
- vue: 3.3.11
+ webpack-cli: '*'
+ peerDependenciesMeta:
+ webpack-cli:
+ optional: true
+ dependencies:
+ '@types/eslint-scope': 3.7.7
+ '@types/estree': 1.0.5
+ '@webassemblyjs/ast': 1.12.1
+ '@webassemblyjs/wasm-edit': 1.12.1
+ '@webassemblyjs/wasm-parser': 1.12.1
+ acorn: 8.11.3
+ acorn-import-assertions: 1.9.0(acorn@8.11.3)
+ browserslist: 4.23.0
+ chrome-trace-event: 1.0.3
+ enhanced-resolve: 5.16.0
+ es-module-lexer: 1.5.0
+ eslint-scope: 5.1.1
+ events: 3.3.0
+ glob-to-regexp: 0.4.1
+ graceful-fs: 4.2.11
+ json-parse-even-better-errors: 2.3.1
+ loader-runner: 4.3.0
+ mime-types: 2.1.35
+ neo-async: 2.6.2
+ schema-utils: 3.3.0
+ tapable: 2.2.1
+ terser-webpack-plugin: 5.3.10(webpack@5.91.0)
+ watchpack: 2.4.1
+ webpack-sources: 3.2.3
transitivePeerDependencies:
- - '@types/node'
- - '@vue/composition-api'
- - less
- - lightningcss
- - sass
- - sass-loader
- - stylus
- - sugarss
- - supports-color
- - terser
- - ts-node
- - typescript
+ - '@swc/core'
+ - esbuild
+ - uglify-js
dev: true
- /vuepress@2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11):
- resolution: {integrity: sha512-sydt/B7+pIw926G5PntYmptLkC5o2buXKh+WR1+P2KnsvkXU+UGnQrJJ0FBvu/4RNuY99tkUZd59nyPhEmRrCg==}
- engines: {node: '>=18.16.0'}
- hasBin: true
+ /websocket-driver@0.7.4:
+ resolution: {integrity: sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==}
+ engines: {node: '>=0.8.0'}
dependencies:
- vuepress-vite: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11)
- transitivePeerDependencies:
- - '@types/node'
- - '@vue/composition-api'
- - '@vuepress/client'
- - less
- - lightningcss
- - sass
- - sass-loader
- - stylus
- - sugarss
- - supports-color
- - terser
- - ts-node
- - typescript
- - vue
+ http-parser-js: 0.5.8
+ safe-buffer: 5.2.1
+ websocket-extensions: 0.1.4
+ dev: true
+
+ /websocket-extensions@0.1.4:
+ resolution: {integrity: sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==}
+ engines: {node: '>=0.8.0'}
dev: true
/which@2.0.2:
@@ -2355,11 +4566,41 @@ packages:
isexe: 2.0.0
dev: true
- /yallist@4.0.0:
- resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==}
+ /wildcard@2.0.1:
+ resolution: {integrity: sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==}
+ dev: true
+
+ /wrap-ansi@7.0.0:
+ resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==}
+ engines: {node: '>=10'}
+ dependencies:
+ ansi-styles: 4.3.0
+ string-width: 4.2.3
+ strip-ansi: 6.0.1
+ dev: true
+
+ /wrap-ansi@8.1.0:
+ resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==}
+ engines: {node: '>=12'}
+ dependencies:
+ ansi-styles: 6.2.1
+ string-width: 5.1.2
+ strip-ansi: 7.1.0
dev: true
- /yaml@2.3.4:
- resolution: {integrity: sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==}
- engines: {node: '>= 14'}
+ /ws@8.16.0:
+ resolution: {integrity: sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ==}
+ engines: {node: '>=10.0.0'}
+ peerDependencies:
+ bufferutil: ^4.0.1
+ utf-8-validate: '>=5.0.2'
+ peerDependenciesMeta:
+ bufferutil:
+ optional: true
+ utf-8-validate:
+ optional: true
+ dev: true
+
+ /yallist@4.0.0:
+ resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==}
dev: true
diff --git a/runatlantis.io/.vuepress/config.js b/runatlantis.io/.vuepress/config.js
index 9658fc29a5..bb7d65949c 100644
--- a/runatlantis.io/.vuepress/config.js
+++ b/runatlantis.io/.vuepress/config.js
@@ -1,8 +1,10 @@
-import { googleAnalyticsPlugin } from '@vuepress/plugin-google-analytics'
-import { docsearchPlugin } from '@vuepress/plugin-docsearch'
-import { getDirname, path } from '@vuepress/utils'
-import { defaultTheme, defineUserConfig } from 'vuepress'
-import { sitemapPlugin } from 'vuepress-plugin-sitemap2';
+import { googleAnalyticsPlugin } from '@vuepress/plugin-google-analytics';
+import { docsearchPlugin } from '@vuepress/plugin-docsearch';
+import { getDirname, path } from '@vuepress/utils';
+import { defaultTheme } from '@vuepress/theme-default';
+import { defineUserConfig } from 'vuepress';
+import { sitemapPlugin } from '@vuepress/plugin-sitemap';
+import { webpackBundler } from '@vuepress/bundler-webpack';
const __dirname = getDirname(import.meta.url)
@@ -10,6 +12,7 @@ export default defineUserConfig({
alias: {
'@theme/Home.vue': path.resolve(__dirname, './theme/components/Home.vue'),
},
+ bundler: webpackBundler(),
locales: {
'/': {
lang: 'en-US',
@@ -103,22 +106,22 @@ export default defineUserConfig({
},
sidebar: {
'/guide/': [
- '',
- 'test-drive',
- 'testing-locally',
+ '/guide/',
+ '/guide/test-drive',
+ '/guide/testing-locally',
],
'/docs/': [
{
text: 'Installing Atlantis',
collapsible: true,
children: [
- 'installation-guide',
- 'requirements',
- 'access-credentials',
- 'webhook-secrets',
- 'deployment',
- 'configuring-webhooks',
- 'provider-credentials',
+ '/docs/installation-guide',
+ '/docs/requirements',
+ '/docs/access-credentials',
+ '/docs/webhook-secrets',
+ '/docs/deployment',
+ '/docs/configuring-webhooks',
+ '/docs/provider-credentials',
]
},
{
@@ -127,23 +130,23 @@ export default defineUserConfig({
children: [
{
text: 'Overview',
- link: 'configuring-atlantis',
+ link: '/docs/configuring-atlantis',
},
- 'server-configuration',
- 'server-side-repo-config',
- 'pre-workflow-hooks',
- 'post-workflow-hooks',
- 'policy-checking',
- 'custom-workflows',
- 'repo-level-atlantis-yaml',
- 'upgrading-atlantis-yaml',
- 'command-requirements',
- 'checkout-strategy',
- 'terraform-versions',
- 'terraform-cloud',
- 'using-slack-hooks',
- 'stats',
- 'faq',
+ '/docs/server-configuration',
+ '/docs/server-side-repo-config',
+ '/docs/pre-workflow-hooks',
+ '/docs/post-workflow-hooks',
+ '/docs/policy-checking',
+ '/docs/custom-workflows',
+ '/docs/repo-level-atlantis-yaml',
+ '/docs/upgrading-atlantis-yaml',
+ '/docs/command-requirements',
+ '/docs/checkout-strategy',
+ '/docs/terraform-versions',
+ '/docs/terraform-cloud',
+ '/docs/using-slack-hooks',
+ '/docs/stats',
+ '/docs/faq',
]
},
{
@@ -152,9 +155,9 @@ export default defineUserConfig({
children: [
{
text: 'Overview',
- link: 'using-atlantis',
+ link: '/docs/using-atlantis',
},
- 'api-endpoints',
+ '/docs/api-endpoints',
]
},
{
@@ -163,26 +166,26 @@ export default defineUserConfig({
children: [
{
text: 'Overview',
- link: 'how-atlantis-works',
+ link: '/docs/how-atlantis-works',
},
- 'locking',
- 'autoplanning',
- 'automerging',
- 'security',
+ '/docs/locking',
+ '/docs/autoplanning',
+ '/docs/automerging',
+ '/docs/security',
]
},
{
text: 'Real-time Terraform Logs',
collapsible: true,
children: [
- 'streaming-logs',
+ '/docs/streaming-logs',
]
},
{
text: 'Troubleshooting',
collapsible: true,
children: [
- 'troubleshooting-https',
+ '/docs/troubleshooting-https',
]
}
]
diff --git a/runatlantis.io/.vuepress/theme/index.js b/runatlantis.io/.vuepress/theme/index.js
deleted file mode 100644
index 85ad504429..0000000000
--- a/runatlantis.io/.vuepress/theme/index.js
+++ /dev/null
@@ -1,6 +0,0 @@
-// introduce custom home with navbar
-// https://stackoverflow.com/a/60220684
-// https://vuepress.vuejs.org/theme/inheritance.html#usage
-module.exports = {
- extend: '@vuepress/theme-default'
-}
diff --git a/runatlantis.io/docs/README.md b/runatlantis.io/docs/README.md
index 5527692cf5..92d9f9c000 100644
--- a/runatlantis.io/docs/README.md
+++ b/runatlantis.io/docs/README.md
@@ -3,12 +3,12 @@
These docs are for users that are ready to get Atlantis installed and start using it.
:::tip Looking to get started?
-If you're new here, check out the [Guide](/guide/)
-where you can try our [Test Drive](/guide/test-drive.html) or [Run Atlantis Locally](/guide/testing-locally.html).
+If you're new here, check out the [Guide](../guide/README.md)
+where you can try our [Test Drive](../guide/test-drive.md) or [Run Atlantis Locally](../guide/testing-locally.md).
:::
### Next Steps
-* [Installing Atlantis](/docs/installation-guide.html) – Get Atlantis up and running
-* [Configuring Atlantis](configuring-atlantis.html) – Configure how Atlantis works for your specific use-cases
-* [Using Atlantis](using-atlantis.html) – How do you use Atlantis?
-* [How Atlantis Works](how-atlantis-works.html) – Internals of what Atlantis is doing
+* [Installing Atlantis](installation-guide.md) – Get Atlantis up and running
+* [Configuring Atlantis](configuring-atlantis.md) – Configure how Atlantis works for your specific use-cases
+* [Using Atlantis](using-atlantis.md) – How do you use Atlantis?
+* [How Atlantis Works](how-atlantis-works.md) – Internals of what Atlantis is doing
diff --git a/runatlantis.io/docs/access-credentials.md b/runatlantis.io/docs/access-credentials.md
index 9cd514fb70..1e46c9de24 100644
--- a/runatlantis.io/docs/access-credentials.md
+++ b/runatlantis.io/docs/access-credentials.md
@@ -1,5 +1,5 @@
# Git Host Access Credentials
-This page describes how to create credentials for your Git host (GitHub, GitLab, Bitbucket, or Azure DevOps)
+This page describes how to create credentials for your Git host (GitHub, GitLab, Gitea, Bitbucket, or Azure DevOps)
that Atlantis will use to make API calls.
[[toc]]
@@ -19,6 +19,7 @@ generate an access token. Read on for the instructions for your specific Git hos
* [GitHub](#github-user)
* [GitHub app](#github-app)
* [GitLab](#gitlab)
+* [Gitea](#gitea)
* [Bitbucket Cloud (bitbucket.org)](#bitbucket-cloud-bitbucket-org)
* [Bitbucket Server (aka Stash)](#bitbucket-server-aka-stash)
* [Azure DevOps](#azure-devops)
@@ -46,7 +47,7 @@ Available in Atlantis versions **newer** than 0.13.0.
- Create a file with the contents of the GitHub App Key, e.g. `atlantis-app-key.pem`
- Restart Atlantis with new flags: `atlantis server --gh-app-id --gh-app-key-file atlantis-app-key.pem --gh-webhook-secret --write-git-creds --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`.
- NOTE: Instead of using a file for the GitHub App Key you can also pass the key value directly using `--gh-app-key`. You can also create a config file instead of using flags. See [Server Configuration](/docs/server-configuration.html#config-file).
+ NOTE: Instead of using a file for the GitHub App Key you can also pass the key value directly using `--gh-app-key`. You can also create a config file instead of using flags. See [Server Configuration](server-configuration.md#config-file).
::: warning
Only a single installation per GitHub App is supported at the moment.
@@ -64,7 +65,7 @@ GitHub App handles the webhook calls by itself, hence there is no need to create
- Create a file with the contents of the GitHub App Key, e.g. `atlantis-app-key.pem`
- Start Atlantis with the following flags: `atlantis server --gh-app-id --gh-installation-id --gh-app-key-file atlantis-app-key.pem --gh-webhook-secret --write-git-creds --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`.
- NOTE: Instead of using a file for the GitHub App Key you can also pass the key value directly using `--gh-app-key`. You can also create a config file instead of using flags. See [Server Configuration](/docs/server-configuration.html#config-file).
+ NOTE: Instead of using a file for the GitHub App Key you can also pass the key value directly using `--gh-app-key`. You can also create a config file instead of using flags. See [Server Configuration](server-configuration.md#config-file).
::: tip NOTE
Manually installing the GitHub app means that the credentials can be shared by many Atlantis installations. This has the benefit of centralizing repository access for shared modules / code.
@@ -105,12 +106,21 @@ Since v0.22.3, a new permission for `Members` has been added, which is required
| Members | Read-only |
### GitLab
-- Follow: [https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token)
+- Follow: [GitLab: Create a personal access token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token)
- Create a token with **api** scope
- Record the access token
+### Gitea
+- Go to "Profile and Settings" > "Settings" in Gitea (top-right)
+- Go to "Applications" under "User Settings" in Gitea
+- Create a token under the "Manage Access Tokens" section with the following permissions:
+ - issue: Read and Write
+ - repository: Read and Write
+ - user: Read
+- Record the access token
+
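+If you want to sanity-check the token before handing it to Atlantis, a quick API call works. A minimal sketch, assuming your instance is at `gitea.example.com` (replace with your own host):
+
+```bash
+# Should print your Atlantis user's profile as JSON if the token is valid
+curl -sS -H "Authorization: token $GITEA_TOKEN" \
+  "https://gitea.example.com/api/v1/user"
+```
+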
### Bitbucket Cloud (bitbucket.org)
-- Create an App Password by following [https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/](https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/)
+- Create an App Password by following [Bitbucket Cloud: Create an app password](https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/)
- Label the password "atlantis"
- Select **Pull requests**: **Read** and **Write** so that Atlantis can read your pull requests and write comments to them
- Record the access token
@@ -126,7 +136,7 @@ Since v0.22.3, a new permission for `Members` has been added, which is required
NOTE: Atlantis will send the token as a [Bearer Auth to the Bitbucket API](https://confluence.atlassian.com/bitbucketserver/http-access-tokens-939515499.html#HTTPaccesstokens-UsingHTTPaccesstokens) instead of using Basic Auth.
### Azure DevOps
-- Create a Personal access token by following [https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops](https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops)
+- Create a Personal access token by following [Azure DevOps: Use personal access tokens to authenticate](https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops)
- Label the password "atlantis"
- The minimum scopes required for this token are:
- Code (Read & Write)
@@ -135,4 +145,4 @@ Since v0.22.3, a new permission for `Members` has been added, which is required
- Record the access token
## Next Steps
-Once you've got your user and access token, you're ready to create a webhook secret. See [Creating a Webhook Secret](webhook-secrets.html).
+Once you've got your user and access token, you're ready to create a webhook secret. See [Creating a Webhook Secret](webhook-secrets.md).
diff --git a/runatlantis.io/docs/api-endpoints.md b/runatlantis.io/docs/api-endpoints.md
index 96dd6d0b51..88bf90f937 100644
--- a/runatlantis.io/docs/api-endpoints.md
+++ b/runatlantis.io/docs/api-endpoints.md
@@ -9,7 +9,7 @@ To enable the API endpoints, `api-secret` should be configured.
:::tip Prerequisites
-* Set `api-secret` as part of the [Server Configuration](server-configuration.html#api-secret)
+* Set `api-secret` as part of the [Server Configuration](server-configuration.md#api-secret)
* Pass `X-Atlantis-Token` with the same secret in the request header
:::
@@ -17,21 +17,21 @@ To enable the API endpoints, `api-secret` should be configured.
#### Description
-Execute [atlantis plan](using-atlantis.html#atlantis-plan) on the specified repository.
+Execute [atlantis plan](using-atlantis.md#atlantis-plan) on the specified repository.
#### Parameters
-| Name | Type | Required | Description |
-|------------|-------------------------------------|----------|------------------------------------------|
-| Repository | string | Yes | Name of the Terraform repository |
-| Ref | string | Yes | Git reference, like a branch name |
-| Type | string | Yes | Type of the VCS provider (Github/Gitlab) |
-| Paths | [ [Path](api-endpoints.html#path) ] | Yes | Paths to the projects to run the plan |
-| PR | int | No | Pull Request number |
+| Name | Type | Required | Description |
+|------------|-----------------------------------|----------|------------------------------------------|
+| Repository | string | Yes | Name of the Terraform repository |
+| Ref | string | Yes | Git reference, like a branch name |
+| Type | string | Yes | Type of the VCS provider (GitHub/GitLab) |
+| Paths | [ [Path](api-endpoints.md#path) ] | Yes | Paths to the projects to run the plan |
+| PR | int | No | Pull Request number |
##### Path
-Similar to the [Options](using-atlantis.html#options) of `atlantis plan`. Path specifies which directory/workspace
+Similar to the [Options](using-atlantis.md#options) of `atlantis plan`. Path specifies which directory/workspace
within the repository to run the plan.
At least one of `Directory` or `Workspace` should be specified.
@@ -92,21 +92,21 @@ curl --request POST 'https:///api/plan' \
#### Description
-Execute [atlantis apply](using-atlantis.html#atlantis-apply) on the specified repository.
+Execute [atlantis apply](using-atlantis.md#atlantis-apply) on the specified repository.
#### Parameters
-| Name | Type | Required | Description |
-|------------|---------------------------------------|----------|------------------------------------------|
-| Repository | string | Yes | Name of the Terraform repository |
-| Ref | string | Yes | Git reference, like a branch name |
-| Type | string | Yes | Type of the VCS provider (Github/Gitlab) |
-| Paths | [ [Path](api-endpoints.html#path-1) ] | Yes | Paths to the projects to run the apply |
-| PR | int | No | Pull Request number |
+| Name | Type | Required | Description |
+|------------|-------------------------------------|----------|------------------------------------------|
+| Repository | string | Yes | Name of the Terraform repository |
+| Ref | string | Yes | Git reference, like a branch name |
+| Type | string | Yes | Type of the VCS provider (GitHub/GitLab) |
+| Paths | [ [Path](api-endpoints.md#path-1) ] | Yes | Paths to the projects to run the apply |
+| PR | int | No | Pull Request number |
##### Path
-Similar to the [Options](using-atlantis.html#options-1) of `atlantis apply`. Path specifies which directory/workspace
+Similar to the [Options](using-atlantis.md#options-1) of `atlantis apply`. Path specifies which directory/workspace
within the repository to run the apply.
At least one of `Directory` or `Workspace` should be specified.
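+
+As with the plan endpoint, a minimal sketch of a call (host, secret, and repository details are placeholders):
+
+```bash
+curl --request POST "https://$ATLANTIS_HOST/api/apply" \
+  --header "X-Atlantis-Token: $ATLANTIS_API_SECRET" \
+  --data '{
+    "Repository": "my-org/my-repo",
+    "Ref": "main",
+    "Type": "Github",
+    "Paths": [{ "Directory": "." }]
+  }'
+```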
diff --git a/runatlantis.io/docs/apply-requirements.md b/runatlantis.io/docs/apply-requirements.md
index 870ac4972e..166931851d 100644
--- a/runatlantis.io/docs/apply-requirements.md
+++ b/runatlantis.io/docs/apply-requirements.md
@@ -1,5 +1,5 @@
# Apply Requirements
:::warning REDIRECT
-This page is moved to [Command Requirements](/docs/command-requirements.html).
+This page has moved to [Command Requirements](command-requirements.md).
:::
diff --git a/runatlantis.io/docs/autoplanning.md b/runatlantis.io/docs/autoplanning.md
index 2183219703..730dcd98a9 100644
--- a/runatlantis.io/docs/autoplanning.md
+++ b/runatlantis.io/docs/autoplanning.md
@@ -26,8 +26,8 @@ Given the directory structure:
* If `project1/main.tf` were modified, we would run `plan` in `project1`
* If `modules/module1/main.tf` were modified, we would not automatically run `plan` because we couldn't determine the location of the terraform project
- * You could use an [atlantis.yaml](repo-level-atlantis-yaml.html#configuring-planning) file to specify which projects to plan when this module changed
- * You could enable [module autoplanning](server-configuration.html#autoplan-modules) which indexes projects to their local module dependencies.
+ * You could use an [atlantis.yaml](repo-level-atlantis-yaml.md#configuring-planning) file to specify which projects to plan when this module changed (see the sketch below)
+ * You could enable [module autoplanning](server-configuration.md#autoplan-modules) which indexes projects to their local module dependencies.
* Or you could manually plan with `atlantis plan -d `
* If `project1/modules/module1/main.tf` were modified, we would look one level above `project1/modules`
into `project1/`, see that there was a `main.tf` file and so run plan in `project1/`
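+
+For the module scenario above, a minimal sketch of such a repo-level config (directory and module names are illustrative):
+
+```yaml
+# atlantis.yaml (sketch): also plan project1 when module1 changes
+version: 3
+projects:
+- dir: project1
+  autoplan:
+    when_modified: ["*.tf", "../modules/module1/*.tf"]
+```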
@@ -42,5 +42,5 @@ This scenario can happen if:
If you would like to customize how Atlantis determines which directory to run in
or disable it altogether, you need to create an `atlantis.yaml` file.
See
-* [Disabling Autoplanning](repo-level-atlantis-yaml.html#disabling-autoplanning)
-* [Configuring Planning](repo-level-atlantis-yaml.html#configuring-planning)
+* [Disabling Autoplanning](repo-level-atlantis-yaml.md#disabling-autoplanning)
+* [Configuring Planning](repo-level-atlantis-yaml.md#configuring-planning)
diff --git a/runatlantis.io/docs/command-requirements.md b/runatlantis.io/docs/command-requirements.md
index e3aea4ea21..046542f786 100644
--- a/runatlantis.io/docs/command-requirements.md
+++ b/runatlantis.io/docs/command-requirements.md
@@ -92,7 +92,7 @@ Each VCS provider has a different concept of "mergeability":
::: warning
Some VCS providers have a feature for branch protection to control "mergeability". To use it,
limit the base branch so as not to bypass the branch protection.
-See also the `branch` keyword in [Server Side Repo Config](server-side-repo-config.html#reference) for more details.
+See also the `branch` keyword in [Server Side Repo Config](server-side-repo-config.md#reference) for more details.
:::
#### GitHub
@@ -103,9 +103,9 @@ If you set up Protected Branches then you can enforce:
* Requiring certain status checks to be passing
* Requiring certain people to have reviewed and approved the pull request
* Requiring `CODEOWNERS` to have reviewed and approved the pull request
-* Requiring that the branch is up to date with `main`
+* Requiring that the branch is up-to-date with `main`
-See [https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches)
+See [GitHub: About protected branches](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches)
for more details.
::: warning
@@ -255,7 +255,7 @@ Once the apply requirement is satisfied, **anyone** that can comment on the pull
request can run the actual `atlantis apply` command.
## Next Steps
-* For more information on GitHub pull request reviews and approvals see: [https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/about-pull-request-reviews](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/about-pull-request-reviews)
-* For more information on GitLab merge request reviews and approvals (only supported on GitLab Enterprise) see: [https://docs.gitlab.com/ee/user/project/merge_requests/approvals/](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/).
-* For more information on Bitbucket pull request reviews and approvals see: [https://confluence.atlassian.com/bitbucket/pull-requests-and-code-review-223220593.html](https://confluence.atlassian.com/bitbucket/pull-requests-and-code-review-223220593.html)
-* For more information on Azure DevOps pull request reviews and approvals see: [https://docs.microsoft.com/en-us/azure/devops/repos/git/pull-requests?view=azure-devops&tabs=browser](https://docs.microsoft.com/en-us/azure/devops/repos/git/pull-requests?view=azure-devops&tabs=browser)
+* For more information on GitHub pull request reviews and approvals see: [GitHub: About pull request reviews](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/about-pull-request-reviews)
+* For more information on GitLab merge request reviews and approvals (only supported on GitLab Enterprise) see: [GitLab: Merge request approvals](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/).
+* For more information on Bitbucket pull request reviews and approvals see: [Bitbucket: Use pull requests for code review](https://confluence.atlassian.com/bitbucket/pull-requests-and-code-review-223220593.html)
+* For more information on Azure DevOps pull request reviews and approvals see: [Azure DevOps: Create pull requests](https://docs.microsoft.com/en-us/azure/devops/repos/git/pull-requests?view=azure-devops&tabs=browser)
diff --git a/runatlantis.io/docs/configuring-atlantis.md b/runatlantis.io/docs/configuring-atlantis.md
index 46edbbbc3c..d3718d9bb5 100644
--- a/runatlantis.io/docs/configuring-atlantis.md
+++ b/runatlantis.io/docs/configuring-atlantis.md
@@ -10,16 +10,16 @@ Flags to `atlantis server` are used to configure the global operation of
Atlantis, for example setting credentials for your Git Host
or configuring SSL certs.
-See [Server Configuration](server-configuration.html) for more details.
+See [Server Configuration](server-configuration.md) for more details.
## Server-Side Repo Config
A Server-Side Repo Config file is used to control per-repo behaviour
and what users can do in repo-level `atlantis.yaml` files.
-See [Server-Side Repo Config](server-side-repo-config.html) for more details.
+See [Server-Side Repo Config](server-side-repo-config.md) for more details.
## Repo-Level `atlantis.yaml` Files
`atlantis.yaml` files placed at the root of your Terraform repos can be used to
change the default Atlantis behaviour for each repo.
-See [Repo-Level atlantis.yaml Files](repo-level-atlantis-yaml.html) for more details.
+See [Repo-Level atlantis.yaml Files](repo-level-atlantis-yaml.md) for more details.
diff --git a/runatlantis.io/docs/configuring-webhooks.md b/runatlantis.io/docs/configuring-webhooks.md
index be285ef6bc..82a6e1d3c3 100644
--- a/runatlantis.io/docs/configuring-webhooks.md
+++ b/runatlantis.io/docs/configuring-webhooks.md
@@ -54,6 +54,26 @@ If you're using GitLab, navigate to your project's home page in GitLab
- click **Add webhook**
- See [Next Steps](#next-steps)
+## Gitea
+If you're using Gitea, navigate to your project's home page in Gitea
+- Click **Settings** in the top bar, then **Webhooks** in the sidebar
+- Click **Add webhook > Gitea** (Gitea webhooks are service-specific, but this option works)
+- Set **Target URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`**
+- Double-check you added `/events` to the end of your URL (see the reachability sketch after this list).
+- Set **Secret** to the Webhook Secret you generated previously
+ - **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret.
+- Select **Custom Events...**
+- Check the boxes
+ - **Repository events > Push**
+ - **Issue events > Issue Comment**
+ - **Pull Request events > Pull Request**
+ - **Pull Request events > Pull Request Comment**
+ - **Pull Request events > Pull Request Reviewed**
+ - **Pull Request events > Pull Request Synchronized**
+- Leave **Active** checked
+- Click **Add Webhook**
+- See [Next Steps](#next-steps)
+
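+Before adding the webhook, it can help to confirm that Atlantis is up and the Target URL resolves; Atlantis serves a health endpoint you can probe (a sketch; note that Gitea itself must also be able to reach this URL):
+
+```bash
+# Expect HTTP 200 if Atlantis is running and reachable
+curl -sS -o /dev/null -w "%{http_code}\n" "http://$URL/healthz"
+```
+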
## Bitbucket Cloud (bitbucket.org)
- Go to your repo's home page
- Click **Settings** in the sidebar
diff --git a/runatlantis.io/docs/custom-workflows.md b/runatlantis.io/docs/custom-workflows.md
index 61f7ae78ef..69ce21e19a 100644
--- a/runatlantis.io/docs/custom-workflows.md
+++ b/runatlantis.io/docs/custom-workflows.md
@@ -11,9 +11,9 @@ Custom workflows can be specified in the Server-Side Repo Config or in the Repo-
**Notes**
* If you want to allow repos to select their own workflows, they must have the
-`allowed_overrides: [workflow]` setting. See [server-side repo config use cases](server-side-repo-config.html#allow-repos-to-choose-a-server-side-workflow) for more details.
+`allowed_overrides: [workflow]` setting. See [server-side repo config use cases](server-side-repo-config.md#allow-repos-to-choose-a-server-side-workflow) for more details.
* If in addition you also want to allow repos to define their own workflows, they must have the
-`allow_custom_workflows: true` setting. See [server-side repo config use cases](server-side-repo-config.html#allow-repos-to-define-their-own-workflows) for more details.
+`allow_custom_workflows: true` setting. See [server-side repo config use cases](server-side-repo-config.md#allow-repos-to-define-their-own-workflows) for more details.
## Use Cases
@@ -117,7 +117,7 @@ workflows:
extra_args: ["-lock=false"]
```
-If [policy checking](/docs/policy-checking.html#how-it-works) is enabled, `extra_args` can also be used to change the default behaviour of conftest.
+If [policy checking](policy-checking.md#how-it-works) is enabled, `extra_args` can also be used to change the default behaviour of conftest.
```yaml
workflows:
@@ -356,7 +356,7 @@ workflows:
::: warning
Atlantis will need to have the `terragrunt` binary in its PATH.
-If you're using Docker you can build your own image, see [Customization](/docs/deployment.html#customization).
+If you're using Docker you can build your own image, see [Customization](deployment.md#customization).
:::
If you don't want to create/manage the repo's `atlantis.yaml` file yourself, you can use the tool [terragrunt-atlantis-config](https://github.com/transcend-io/terragrunt-atlantis-config) to generate it.
@@ -530,7 +530,7 @@ Full
either be generated (by show) or already exist (if running policy checks). Can be used to
override the built-in `plan`/`apply` commands, ex. `run: terraform show -json $PLANFILE > $SHOWFILE`.
* `POLICYCHECKFILE` - Absolute path to the location of policy check output if Atlantis runs policy checks.
- See [policy checking](/docs/policy-checking.html#data-for-custom-run-steps) for information of data structure.
+ See [policy checking](policy-checking.md#data-for-custom-run-steps) for information of data structure.
* `BASE_REPO_NAME` - Name of the repository that the pull request will be merged into, ex. `atlantis`.
* `BASE_REPO_OWNER` - Owner of the repository that the pull request will be merged into, ex. `runatlantis`.
* `HEAD_REPO_NAME` - Name of the repository that is getting merged into the base repository, ex. `atlantis`.
diff --git a/runatlantis.io/docs/deployment.md b/runatlantis.io/docs/deployment.md
index 05e91b5e70..9c21f5ed1c 100644
--- a/runatlantis.io/docs/deployment.md
+++ b/runatlantis.io/docs/deployment.md
@@ -2,8 +2,8 @@
This page covers getting Atlantis up and running in your infrastructure.
::: tip Prerequisites
-* You have created [access credentials](access-credentials.html) for your Atlantis user
-* You have created a [webhook secret](webhook-secrets.html)
+* You have created [access credentials](access-credentials.md) for your Atlantis user
+* You have created a [webhook secret](webhook-secrets.md)
:::
[[toc]]
@@ -17,10 +17,10 @@ Atlantis [Docker image](https://ghcr.io/runatlantis/atlantis).
### Routing
Atlantis and your Git host need to be able to route and communicate with one another. Your Git host needs to be able to send webhooks to Atlantis and Atlantis needs to be able to make API calls to your Git host.
If you're using
-a public Git host like github.com, gitlab.com, bitbucket.org, or dev.azure.com then you'll need to
+a public Git host like github.com, gitlab.com, gitea.com, bitbucket.org, or dev.azure.com then you'll need to
expose Atlantis to the internet.
-If you're using a private Git host like GitHub Enterprise, GitLab Enterprise or
+If you're using a private Git host like GitHub Enterprise, GitLab Enterprise, self-hosted Gitea or
Bitbucket Server, then Atlantis needs to be routable from the private host and Atlantis will need to be able to route to the private host.
### Data
@@ -68,7 +68,7 @@ To install:
orgAllowlist: github.com/runatlantis/*
```
**Note**: For helm chart version < `4.0.2`, `orgWhitelist` must be used instead.
-1. Configure any other variables (see [https://github.com/runatlantis/helm-charts#customization](https://github.com/runatlantis/helm-charts#customization)
+1. Configure any other variables (see [Atlantis Helm Chart: Customization](https://github.com/runatlantis/helm-charts#customization)
for documentation)
1. Run
```sh
@@ -104,23 +104,26 @@ If you're using Bitbucket Cloud then there is no webhook secret since it's not s
:::
Next, edit the manifests below as follows:
-1. Replace `` in `image: ghcr.io/runatlantis/atlantis:` with the most recent version from [https://github.com/runatlantis/atlantis/releases/latest](https://github.com/runatlantis/atlantis/releases/latest).
+1. Replace `` in `image: ghcr.io/runatlantis/atlantis:` with the most recent version from [GitHub: Atlantis latest release](https://github.com/runatlantis/atlantis/releases/latest).
* NOTE: You never want to run with `:latest` because if your Pod moves to a new node, Kubernetes will pull the latest image and you might end
up upgrading Atlantis by accident!
2. Replace `value: github.com/yourorg/*` under `name: ATLANTIS_REPO_ALLOWLIST` with the allowlist pattern
-for your Terraform repos. See [Repo Allowlist](server-configuration.html#repo-allowlist) for more details.
+for your Terraform repos. See [--repo-allowlist](server-configuration.md#repo-allowlist) for more details.
3. If you're using GitHub:
1. Replace `` with the username of your Atlantis GitHub user without the `@`.
- 2. Delete all the `ATLANTIS_GITLAB_*`, `ATLANTIS_BITBUCKET_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables.
+ 2. Delete all the `ATLANTIS_GITLAB_*`, `ATLANTIS_GITEA_*`, `ATLANTIS_BITBUCKET_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables.
4. If you're using GitLab:
1. Replace `` with the username of your Atlantis GitLab user without the `@`.
- 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_BITBUCKET_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables.
-5. If you're using Bitbucket:
+ 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITEA_*`, `ATLANTIS_BITBUCKET_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables.
+5. If you're using Gitea:
+ 1. Replace `` with the username of your Atlantis Gitea user without the `@`.
+ 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITLAB_*`, `ATLANTIS_BITBUCKET_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables.
+6. If you're using Bitbucket:
1. Replace `` with the username of your Atlantis Bitbucket user without the `@`.
- 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITLAB_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables.
-6. If you're using Azure DevOps:
+ 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITLAB_*`, `ATLANTIS_GITEA_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables.
+7. If you're using Azure DevOps:
1. Replace `` with the username of your Atlantis Azure DevOps user without the `@`.
- 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITLAB_*`, and `ATLANTIS_BITBUCKET_*` environment variables.
+ 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITLAB_*`, `ATLANTIS_GITEA_*`, and `ATLANTIS_BITBUCKET_*` environment variables.
#### StatefulSet Manifest
@@ -185,6 +188,21 @@ spec:
key: webhook-secret
### End GitLab Config ###
+ ### Gitea Config ###
+ - name: ATLANTIS_GITEA_USER
+ value: # 5i. If you're using Gitea replace with the username of your Atlantis Gitea user without the `@`.
+ - name: ATLANTIS_GITEA_TOKEN
+ valueFrom:
+ secretKeyRef:
+ name: atlantis-vcs
+ key: token
+ - name: ATLANTIS_GITEA_WEBHOOK_SECRET
+ valueFrom:
+ secretKeyRef:
+ name: atlantis-vcs
+ key: webhook-secret
+ ### End Gitea Config ###
+
### Bitbucket Config ###
- name: ATLANTIS_BITBUCKET_USER
value: # 6i. If you're using Bitbucket replace with the username of your Atlantis Bitbucket user without the `@`.
@@ -333,6 +351,21 @@ spec:
key: webhook-secret
### End GitLab Config ###
+ ### Gitea Config ###
+ - name: ATLANTIS_GITEA_USER
+ value: # 5i. If you're using Gitea replace with the username of your Atlantis Gitea user without the `@`.
+ - name: ATLANTIS_GITEA_TOKEN
+ valueFrom:
+ secretKeyRef:
+ name: atlantis-vcs
+ key: token
+ - name: ATLANTIS_GITEA_WEBHOOK_SECRET
+ valueFrom:
+ secretKeyRef:
+ name: atlantis-vcs
+ key: webhook-secret
+ ### End Gitea Config ###
+
### Bitbucket Config ###
- name: ATLANTIS_BITBUCKET_USER
value: # 6i. If you're using Bitbucket replace with the username of your Atlantis Bitbucket user without the `@`.
@@ -412,7 +445,7 @@ The manifests above create a Kubernetes `Service` of `type: ClusterIP` which isn
Depending on how you're doing routing into Kubernetes, you may want to use a Service of `type: LoadBalancer` so that Atlantis is accessible
to GitHub/GitLab and your internal users.
-If you want to add SSL you can use something like [https://github.com/jetstack/cert-manager](https://github.com/jetstack/cert-manager) to generate SSL
+If you want to add SSL you can use something like [cert-manager](https://github.com/cert-manager/cert-manager) to generate SSL
certs and mount them into the Pod. Then set the `ATLANTIS_SSL_CERT_FILE` and `ATLANTIS_SSL_KEY_FILE` environment variables to enable SSL.
You could also set up SSL at your LoadBalancer.
@@ -481,6 +514,26 @@ containers:
key: webhook-secret
```
+#### Gitea
+
+```yaml
+containers:
+- name: atlantis
+ env:
+ - name: ATLANTIS_GITEA_USER
+ value: # 5i. If you're using Gitea replace with the username of your Atlantis Gitea user without the `@`.
+ - name: ATLANTIS_GITEA_TOKEN
+ valueFrom:
+ secretKeyRef:
+ name: atlantis-vcs
+ key: token
+ - name: ATLANTIS_GITEA_WEBHOOK_SECRET
+ valueFrom:
+ secretKeyRef:
+ name: atlantis-vcs
+ key: webhook-secret
+```
+
#### GitHub
```yaml
@@ -532,7 +585,7 @@ If you'd like to run Atlantis on [AWS Fargate](https://aws.amazon.com/fargate/)
You can run Atlantis on GKE using the [Helm chart](#kubernetes-helm-chart) or the [manifests](#kubernetes-manifests).
There is also a set of full Terraform configurations that create a GKE Cluster,
-Cloud Storage Backend and TLS certs: [https://github.com/sethvargo/atlantis-on-gke](https://github.com/sethvargo/atlantis-on-gke).
+Cloud Storage Backend and TLS certs: [sethvargo atlantis-on-gke](https://github.com/sethvargo/atlantis-on-gke).
Once you're done, see [Next Steps](#next-steps).
@@ -582,7 +635,7 @@ Another option is [Azure Container Instances](https://docs.microsoft.com/en-us/a
### Roll Your Own
If you want to roll your own Atlantis installation, you can get the `atlantis`
-binary from [https://github.com/runatlantis/atlantis/releases](https://github.com/runatlantis/atlantis/releases)
+binary from [GitHub](https://github.com/runatlantis/atlantis/releases)
or use the [official Docker image](https://ghcr.io/runatlantis/atlantis).
#### Startup Command
@@ -632,6 +685,17 @@ atlantis server \
--repo-allowlist="$REPO_ALLOWLIST"
```
+##### Gitea
+```bash
+atlantis server \
+--atlantis-url="$URL" \
+--gitea-user="$USERNAME" \
+--gitea-token="$TOKEN" \
+--gitea-webhook-secret="$SECRET" \
+--gitea-page-size=30 \
+--repo-allowlist="$REPO_ALLOWLIST"
+```
+
##### Bitbucket Cloud (bitbucket.org)
```bash
atlantis server \
@@ -671,20 +735,21 @@ atlantis server \
Where
- `$URL` is the URL that Atlantis can be reached at
-- `$USERNAME` is the GitHub/GitLab/Bitbucket/AzureDevops username you generated the token for
+- `$USERNAME` is the GitHub/GitLab/Gitea/Bitbucket/Azure DevOps username you generated the token for
- `$TOKEN` is the access token you created. If you don't want this to be passed
in as an argument for security reasons you can specify it in a config file
- (see [Configuration](/docs/server-configuration.html#environment-variables))
- or as an environment variable: `ATLANTIS_GH_TOKEN` or `ATLANTIS_GITLAB_TOKEN`
+ (see [Configuration](server-configuration.md#environment-variables))
+ or as an environment variable: `ATLANTIS_GH_TOKEN` or `ATLANTIS_GITLAB_TOKEN` or `ATLANTIS_GITEA_TOKEN`
or `ATLANTIS_BITBUCKET_TOKEN` or `ATLANTIS_AZUREDEVOPS_TOKEN`
- `$SECRET` is the random key you used for the webhook secret.
If you don't want this to be passed in as an argument for security reasons
you can specify it in a config file
- (see [Configuration](/docs/server-configuration.html#environment-variables))
- or as an environment variable: `ATLANTIS_GH_WEBHOOK_SECRET` or `ATLANTIS_GITLAB_WEBHOOK_SECRET`
+ (see [Configuration](server-configuration.md#environment-variables))
+ or as an environment variable: `ATLANTIS_GH_WEBHOOK_SECRET` or `ATLANTIS_GITLAB_WEBHOOK_SECRET` or
+ `ATLANTIS_GITEA_WEBHOOK_SECRET` (see the sketch after this list)
- `$REPO_ALLOWLIST` is which repos Atlantis can run on, ex.
`github.com/runatlantis/*` or `github.enterprise.corp.com/*`.
- See [Repo Allowlist](server-configuration.html#repo-allowlist) for more details.
+ See [--repo-allowlist](server-configuration.md#repo-allowlist) for more details.
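+
+For example, a sketch of the environment-variable alternative for Gitea (values are placeholders):
+
+```bash
+export ATLANTIS_GITEA_TOKEN="$TOKEN"
+export ATLANTIS_GITEA_WEBHOOK_SECRET="$SECRET"
+atlantis server \
+--atlantis-url="$URL" \
+--gitea-user="$USERNAME" \
+--repo-allowlist="$REPO_ALLOWLIST"
+```
+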
Atlantis is now running!
::: tip
@@ -694,4 +759,4 @@ restart it in case of failure.
## Next Steps
* To ensure Atlantis is running, load its UI. By default Atlantis runs on port `4141`.
-* Now you're ready to add Webhooks to your repos. See [Configuring Webhooks](configuring-webhooks.html).
+* Now you're ready to add Webhooks to your repos. See [Configuring Webhooks](configuring-webhooks.md).
diff --git a/runatlantis.io/docs/how-atlantis-works.md b/runatlantis.io/docs/how-atlantis-works.md
index ed57d988f5..8469214383 100644
--- a/runatlantis.io/docs/how-atlantis-works.md
+++ b/runatlantis.io/docs/how-atlantis-works.md
@@ -1,7 +1,7 @@
# How Atlantis Works
This section of the docs describes how Atlantis works at a deeper level.
-* [Locking](locking.html)
-* [Autoplanning](autoplanning.html)
-* [Automerging](automerging.html)
-* [Security](security.html)
+* [Locking](locking.md)
+* [Autoplanning](autoplanning.md)
+* [Automerging](automerging.md)
+* [Security](security.md)
diff --git a/runatlantis.io/docs/installation-guide.md b/runatlantis.io/docs/installation-guide.md
index fafa5d5b90..fae9706850 100644
--- a/runatlantis.io/docs/installation-guide.md
+++ b/runatlantis.io/docs/installation-guide.md
@@ -2,19 +2,19 @@
This guide is for installing a **production-ready** instance of Atlantis onto your
infrastructure:
1. First, ensure your Terraform setup meets the Atlantis **requirements**
- * See [Requirements](requirements.html)
-1. Create **access credentials** for your Git host (GitHub, GitLab, Bitbucket, Azure DevOps)
- * See [Generating Git Host Access Credentials](access-credentials.html)
+ * See [Requirements](requirements.md)
+1. Create **access credentials** for your Git host (GitHub, GitLab, Gitea, Bitbucket, Azure DevOps)
+ * See [Generating Git Host Access Credentials](access-credentials.md)
1. Create a **webhook secret** so Atlantis can validate webhooks
- * See [Creating a Webhook Secret](webhook-secrets.html)
+ * See [Creating a Webhook Secret](webhook-secrets.md)
1. **Deploy** Atlantis into your infrastructure
- * See [Deployment](deployment.html)
+ * See [Deployment](deployment.md)
1. Configure **Webhooks** on your Git host so Atlantis can respond to your pull requests
- * See [Configuring Webhooks](configuring-webhooks.html)
+ * See [Configuring Webhooks](configuring-webhooks.md)
1. Configure **provider credentials** so Atlantis can actually run Terraform commands
- * See [Provider Credentials](provider-credentials.html)
+ * See [Provider Credentials](provider-credentials.md)
:::tip
-If you want to test out Atlantis first, check out [Test Drive](../guide/test-drive.html)
-and [Testing Locally](../guide/testing-locally.html).
+If you want to test out Atlantis first, check out [Test Drive](../guide/test-drive.md)
+and [Testing Locally](../guide/testing-locally.md).
:::
diff --git a/runatlantis.io/docs/policy-checking.md b/runatlantis.io/docs/policy-checking.md
index c996ef7ee0..089685d303 100644
--- a/runatlantis.io/docs/policy-checking.md
+++ b/runatlantis.io/docs/policy-checking.md
@@ -10,7 +10,7 @@ for using this step include:
## How it works
-Enabling "policy checking" in addition to the [mergeable apply requirement](/docs/command-requirements.html#supported-requirements) blocks applies on plans that fail any of the defined conftest policies.
+Enabling "policy checking" in addition to the [mergeable apply requirement](command-requirements.md#supported-requirements) blocks applies on plans that fail any of the defined conftest policies.
![Policy Check Apply Failure](./images/policy-check-apply-failure.png)
@@ -44,7 +44,7 @@ All repositories will have policy checking enabled.
### Step 2: Define the policy configuration
-Policy Configuration is defined in the [server-side repo configuration](https://www.runatlantis.io/docs/server-side-repo-config.html#reference).
+Policy Configuration is defined in the [server-side repo configuration](server-side-repo-config.md#reference).
In this example we will define one policy set with one owner:
@@ -72,7 +72,7 @@ policies:
- `owners` - Defines the users/teams which are able to approve a specific policy set.
- `approve_count` - Defines the number of approvals needed to bypass policy checks. Defaults to the top-level policies configuration, if not specified.
-By default conftest is configured to only run the `main` package. If you wish to run specific/multiple policies consider passing `--namespace` or `--all-namespaces` to conftest with [`extra_args`](https://www.runatlantis.io/docs/custom-workflows.html#adding-extra-arguments-to-terraform-commands) via a custom workflow as shown in the below example.
+By default, conftest is configured to run only the `main` package. If you wish to run specific or multiple policies, consider passing `--namespace` or `--all-namespaces` to conftest with [`extra_args`](custom-workflows.md#adding-extra-arguments-to-terraform-commands) via a custom workflow, as shown in the example below.
Example Server Side Repo configuration using `--all-namespaces` and a local src dir.
@@ -144,7 +144,7 @@ That's it! Now your Atlantis instance is configured to run policies on your Terr
### Pulling policies from a remote location
-Conftest supports [pulling policies](https://www.conftest.dev/sharing/#pulling) from remote locations such as S3, git, OCI, and other protocols supported by the [go-getter](https://github.com/hashicorp/go-getter) library. The key [`extra_args`](https://www.runatlantis.io/docs/custom-workflows.html#adding-extra-arguments-to-terraform-commands) can be used to pass in the [`--update`](https://www.conftest.dev/sharing/#-update-flag) flag to tell `conftest` to pull the policies into the project folder before running the policy check.
+Conftest supports [pulling policies](https://www.conftest.dev/sharing/#pulling) from remote locations such as S3, git, OCI, and other protocols supported by the [go-getter](https://github.com/hashicorp/go-getter) library. The key [`extra_args`](custom-workflows.md#adding-extra-arguments-to-terraform-commands) can be used to pass in the [`--update`](https://www.conftest.dev/sharing/#-update-flag) flag to tell `conftest` to pull the policies into the project folder before running the policy check.
```yaml
workflows:
@@ -163,7 +163,7 @@ Note that authentication may need to be configured separately if pulling policie
### Running policy check against Terraform source code
-By default, Atlantis runs the policy check against the [`SHOWFILE`](https://www.runatlantis.io/docs/custom-workflows.html#custom-run-command). In order to run the policy test against Terraform files directly, override the default `conftest` command used and pass in `*.tf` as one of the inputs to `conftest`. The `show` step is required so that Atlantis will generate the `SHOWFILE`.
+By default, Atlantis runs the policy check against the [`SHOWFILE`](custom-workflows.md#custom-run-command). In order to run the policy test against Terraform files directly, override the default `conftest` command used and pass in `*.tf` as one of the inputs to `conftest`. The `show` step is required so that Atlantis will generate the `SHOWFILE`.
```yaml
workflows:
@@ -176,7 +176,7 @@ workflows:
### Quiet policy checks
-By default, Atlantis will add a comment to all pull requests with the policy check result - both successes and failures. Version 0.21.0 added the [`--quiet-policy-checks`](server-configuration.html#quiet-policy-checks) option, which will instead only add comments when policy checks fail, significantly reducing the number of comments when most policy check results succeed.
+By default, Atlantis will add a comment to all pull requests with the policy check result - both successes and failures. Version 0.21.0 added the [`--quiet-policy-checks`](server-configuration.md#quiet-policy-checks) option, which will instead only add comments when policy checks fail, significantly reducing the number of comments when most policy check results succeed.
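+
+A sketch of enabling it, either as a flag or as the equivalent environment variable (assuming the usual `ATLANTIS_` naming convention for server flags):
+
+```bash
+# append to your existing `atlantis server` flags:
+#   --quiet-policy-checks
+# or set the equivalent environment variable before starting the server:
+export ATLANTIS_QUIET_POLICY_CHECKS=true
+```
+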
### Data for custom run steps
@@ -198,7 +198,7 @@ When the policy check workflow runs, a file is created in the working directory
## Running policy check only on some repositories
-When policy checking is enabled it will be enforced on all repositories, in order to disable policy checking on some repositories first [enable policy checks](https://www.runatlantis.io/docs/policy-checking.html#getting-started) and then disable it explicitly on each repository with the `policy_check` flag.
+When policy checking is enabled, it is enforced on all repositories. To disable policy checking on some repositories, first [enable policy checks](policy-checking.md#getting-started) and then disable it explicitly on each repository with the `policy_check` flag.
For server side config:
```yml
diff --git a/runatlantis.io/docs/post-workflow-hooks.md b/runatlantis.io/docs/post-workflow-hooks.md
index a9f1e05e94..0a63ab1f0b 100644
--- a/runatlantis.io/docs/post-workflow-hooks.md
+++ b/runatlantis.io/docs/post-workflow-hooks.md
@@ -2,7 +2,7 @@
Post workflow hooks can be defined to run scripts after default or custom
workflows are executed. Post workflow hooks differ from [custom
-workflows](custom-workflows.html#custom-run-command) in that they are run
+workflows](custom-workflows.md#custom-run-command) in that they are run
outside of Atlantis commands, which means they do not surface their output
back to the PR as a comment.
@@ -88,7 +88,7 @@ repos:
### Custom `run` Command
This is very similar to [custom workflow run
-command](custom-workflows.html#custom-run-command).
+command](custom-workflows.md#custom-run-command).
```yaml
- run: custom-command
diff --git a/runatlantis.io/docs/pre-workflow-hooks.md b/runatlantis.io/docs/pre-workflow-hooks.md
index 9087be24c7..2085953d77 100644
--- a/runatlantis.io/docs/pre-workflow-hooks.md
+++ b/runatlantis.io/docs/pre-workflow-hooks.md
@@ -2,10 +2,10 @@
Pre workflow hooks can be defined to run scripts before default or custom
workflows are executed. Pre workflow hooks differ from [custom
-workflows](custom-workflows.html#custom-run-command) in several ways.
+workflows](custom-workflows.md#custom-run-command) in several ways.
1. Pre workflow hooks do not require the repository configuration to be
- present. This can be utilized to [dynamically generate repo configs](pre-workflow-hooks.html#dynamic-repo-config-generation).
+ present. This can be utilized to [dynamically generate repo configs](pre-workflow-hooks.md#dynamic-repo-config-generation).
2. Pre workflow hooks are run outside of Atlantis commands, which means
they do not surface their output back to the PR as a comment.
@@ -19,7 +19,7 @@ Pre workflow hooks can only be specified in the Server-Side Repo Config under th
::: tip Note
By default, `pre-workflow-hooks` do not prevent Atlantis from executing its
workflows (`plan`, `apply`) even if a `run` command exits with an error. This
-behavior can be changed by setting the [fail-on-pre-workflow-hook-error](server-configuration.html#fail-on-pre-workflow-hook-error)
+behavior can be changed by setting the [fail-on-pre-workflow-hook-error](server-configuration.md#fail-on-pre-workflow-hook-error)
flag in the Atlantis server configuration.
:::
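+
+A sketch of opting into the stricter behaviour (VCS credential and other flags from [Deployment](deployment.md) omitted for brevity):
+
+```bash
+atlantis server \
+--atlantis-url="$URL" \
+--repo-allowlist="$REPO_ALLOWLIST" \
+--fail-on-pre-workflow-hook-error
+```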
@@ -84,7 +84,7 @@ repos:
### Custom `run` Command
This is very similar to the [custom workflow run
-command](custom-workflows.html#custom-run-command).
+command](custom-workflows.md#custom-run-command).
```yaml
- run: custom-command
diff --git a/runatlantis.io/docs/provider-credentials.md b/runatlantis.io/docs/provider-credentials.md
index 793c082e94..dd9a9cfa4c 100644
--- a/runatlantis.io/docs/provider-credentials.md
+++ b/runatlantis.io/docs/provider-credentials.md
@@ -5,8 +5,8 @@ Just like when you run Terraform locally, Atlantis needs credentials for your
specific provider.
It's up to you how you provide credentials for your specific provider to Atlantis:
-* The Atlantis [Helm Chart](deployment.html#kubernetes-helm-chart) and
- [AWS Fargate Module](deployment.html#aws-fargate) have their own mechanisms for provider
+* The Atlantis [Helm Chart](deployment.md#kubernetes-helm-chart) and
+ [AWS Fargate Module](deployment.md#aws-fargate) have their own mechanisms for provider
credentials. Read their docs.
* If you're running Atlantis in a cloud then many clouds have ways to give cloud API access
to applications running on them, ex:
@@ -89,5 +89,5 @@ You can still set these variables yourself using the `extra_args` configuration.
:::
## Next Steps
-* If you want to configure Atlantis further, read [Configuring Atlantis](configuring-atlantis.html)
-* If you're ready to use Atlantis, read [Using Atlantis](using-atlantis.html)
+* If you want to configure Atlantis further, read [Configuring Atlantis](configuring-atlantis.md)
+* If you're ready to use Atlantis, read [Using Atlantis](using-atlantis.md)
diff --git a/runatlantis.io/docs/repo-level-atlantis-yaml.md b/runatlantis.io/docs/repo-level-atlantis-yaml.md
index c4c6ed3792..aba096f311 100644
--- a/runatlantis.io/docs/repo-level-atlantis-yaml.md
+++ b/runatlantis.io/docs/repo-level-atlantis-yaml.md
@@ -16,16 +16,16 @@ but some of the keys are restricted by default.
Restricted keys can be set in the server-side `repos.yaml` repo config file.
You can enable `atlantis.yaml` to override restricted
-keys by setting the `allowed_overrides` key there. See the [Server Side Repo Config](server-side-repo-config.html) for
+keys by setting the `allowed_overrides` key there. See the [Server Side Repo Config](server-side-repo-config.md) for
more details.
**Notes**
* By default, the repo root `atlantis.yaml` file is used.
-* You can change this behaviour by setting [Server Side Repo Config](server-side-repo-config.html)
+* You can change this behaviour by setting [Server Side Repo Config](server-side-repo-config.md)
::: danger DANGER
Atlantis uses the `atlantis.yaml` version from the pull request, similar to other
-CI/CD systems. If you're allowing users to [create custom workflows](server-side-repo-config.html#allow-repos-to-define-their-own-workflows)
+CI/CD systems. If you're allowing users to [create custom workflows](server-side-repo-config.md#allow-repos-to-define-their-own-workflows)
then this means
anyone that can create a pull request to your repo can run arbitrary code on the
Atlantis server.
@@ -230,19 +230,19 @@ atlantis apply -w staging -d project1
```
### Using .tfvars files
-See [Custom Workflow Use Cases: Using .tfvars files](custom-workflows.html#tfvars-files)
+See [Custom Workflow Use Cases: Using .tfvars files](custom-workflows.md#tfvars-files)
### Adding extra arguments to Terraform commands
-See [Custom Workflow Use Cases: Adding extra arguments to Terraform commands](custom-workflows.html#adding-extra-arguments-to-terraform-commands)
+See [Custom Workflow Use Cases: Adding extra arguments to Terraform commands](custom-workflows.md#adding-extra-arguments-to-terraform-commands)
### Custom init/plan/apply Commands
-See [Custom Workflow Use Cases: Custom init/plan/apply Commands](custom-workflows.html#custom-init-plan-apply-commands)
+See [Custom Workflow Use Cases: Custom init/plan/apply Commands](custom-workflows.md#custom-init-plan-apply-commands)
### Terragrunt
-See [Custom Workflow Use Cases: Terragrunt](custom-workflows.html#terragrunt)
+See [Custom Workflow Use Cases: Terragrunt](custom-workflows.md#terragrunt)
### Running custom commands
-See [Custom Workflow Use Cases: Running custom commands](custom-workflows.html#running-custom-commands)
+See [Custom Workflow Use Cases: Running custom commands](custom-workflows.md#running-custom-commands)
### Terraform Versions
If you'd like to use a different version of Terraform than what is in Atlantis'
@@ -270,7 +270,7 @@ projects:
```
:::warning
`plan_requirements`, `apply_requirements` and `import_requirements` are restricted keys so this repo will need to be configured
-to be allowed to set this key. See [Server-Side Repo Config Use Cases](server-side-repo-config.html#repos-can-set-their-own-apply-an-applicable-subcommand).
+to be allowed to set these keys. See [Server-Side Repo Config Use Cases](server-side-repo-config.md#repos-can-set-their-own-apply-an-applicable-subcommand).
:::
### Order of planning/applying
@@ -345,7 +345,7 @@ autodiscover:
```
With the config above, Atlantis will never try to discover projects, even when there are no
`projects` configured. This is useful if dynamically generating Atlantis config in pre_workflow hooks.
-See [Dynamic Repo Config Generation](pre-workflow-hooks.html#dynamic-repo-config-generation).
+See [Dynamic Repo Config Generation](pre-workflow-hooks.md#dynamic-repo-config-generation).
```yaml
autodiscover:
@@ -360,7 +360,7 @@ Use this feature when some projects require specific configuration in a repo wit
it's still desirable for Atlantis to plan/apply for projects not enumerated in the config.
### Custom Backend Config
-See [Custom Workflow Use Cases: Custom Backend Config](custom-workflows.html#custom-backend-config)
+See [Custom Workflow Use Cases: Custom Backend Config](custom-workflows.md#custom-backend-config)
## Reference
### Top-Level Keys
@@ -372,14 +372,14 @@ projects:
workflows:
allowed_regexp_prefixes:
```
-| Key | Type | Default | Required | Description |
-|-------------------------------|----------------------------------------------------------|---------|----------|--------------------------------------------------------------------------------------------------------------------------------------|
-| version | int | none | **yes** | This key is required and must be set to `3`. |
-| automerge | bool | `false` | no | Automatically merges pull request when all plans are applied. |
-| delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge. |
-| projects | array[[Project](repo-level-atlantis-yaml.html#project)] | `[]` | no | Lists the projects in this repo. |
-| workflows *(restricted)* | map[string: [Workflow](custom-workflows.html#reference)] | `{}` | no | Custom workflows. |
-| allowed_regexp_prefixes | array[string] | `[]` | no | Lists the allowed regexp prefixes to use when the [`--enable-regexp-cmd`](server-configuration.html#enable-regexp-cmd) flag is used. |
+| Key | Type | Default | Required | Description |
+|-------------------------------|--------------------------------------------------------|---------|----------|------------------------------------------------------------------------------------------------------------------------------------|
+| version | int | none | **yes** | This key is required and must be set to `3`. |
+| automerge | bool | `false` | no | Automatically merges pull request when all plans are applied. |
+| delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge. |
+| projects | array[[Project](repo-level-atlantis-yaml.md#project)] | `[]` | no | Lists the projects in this repo. |
+| workflows *(restricted)* | map[string: [Workflow](custom-workflows.md#reference)] | `{}` | no | Custom workflows. |
+| allowed_regexp_prefixes | array[string] | `[]` | no | Lists the allowed regexp prefixes to use when the [`--enable-regexp-cmd`](server-configuration.md#enable-regexp-cmd) flag is used. |
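For illustration, a minimal sketch exercising these top-level keys (the directory, workflow name, and prefixes are hypothetical):

```yaml
version: 3
automerge: true
projects:
- dir: project1                # hypothetical project directory
workflows:
  custom:                      # hypothetical workflow; restricted unless allowed server-side
    plan:
      steps:
      - init
      - plan
allowed_regexp_prefixes: ["dev/", "staging/"]
```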
### Project
```yaml
@@ -399,27 +399,27 @@ import_requirements: ["approved"]
workflow: myworkflow
```
-| Key | Type | Default | Required | Description |
-|------------------------------------------|-----------------------|-------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| name | string | none | maybe | Required if there is more than one project with the same `dir` and `workspace`. This project name can be used with the `-p` flag. |
-| branch | string | none | no | Regex matching projects by the base branch of pull request (the branch the pull request is getting merged into). Only projects that match the PR's branch will be considered. By default, all branches are matched. |
-| dir | string | none | **yes** | The directory of this project relative to the repo root. For example if the project was under `./project1` then use `project1`. Use `.` to indicate the repo root. |
-| workspace | string | `"default"` | no | The [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces) for this project. Atlantis will switch to this workplace when planning/applying and will create it if it doesn't exist. |
-| execution_order_group | int | `0` | no | Index of execution order group. Projects will be sort by this field before planning/applying. |
-| delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge. |
-| repo_locking | bool | `true` | no | Get a repository lock in this project when plan. |
-| custom_policy_check | bool | `false` | no | Enable using policy check tools other than Conftest |
-| autoplan | [Autoplan](#autoplan) | none | no | A custom autoplan configuration. If not specified, will use the autoplan config. See [Autoplanning](autoplanning.html). |
-| terraform_version | string | none | no | A specific Terraform version to use when running commands for this project. Must be [Semver compatible](https://semver.org/), ex. `v0.11.0`, `0.12.0-beta1`. |
-| plan_requirements *(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis plan` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. |
-| apply_requirements *(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. |
-| import_requirements *(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. |
-| workflow *(restricted)* | string | none | no | A custom workflow. If not specified, Atlantis will use its default workflow. |
+| Key | Type | Default | Required | Description |
+|------------------------------------------|-----------------------|-------------|----------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| name | string | none | maybe | Required if there is more than one project with the same `dir` and `workspace`. This project name can be used with the `-p` flag. |
+| branch | string | none | no | Regex matching projects by the base branch of pull request (the branch the pull request is getting merged into). Only projects that match the PR's branch will be considered. By default, all branches are matched. |
+| dir | string | none | **yes** | The directory of this project relative to the repo root. For example if the project was under `./project1` then use `project1`. Use `.` to indicate the repo root. |
+| workspace                                 | string                | `"default"` | no       | The [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces) for this project. Atlantis will switch to this workspace when planning/applying and will create it if it doesn't exist.                    |
+| execution_order_group                     | int                   | `0`         | no       | Index of execution order group. Projects will be sorted by this field before planning/applying.                                                                                                                                           |
+| delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge. |
+| repo_locking                              | bool                  | `true`      | no       | Get a repository lock in this project when planning.                                                                                                                                                                                      |
+| custom_policy_check                       | bool                  | `false`     | no       | Enable using policy check tools other than Conftest.                                                                                                                                                                                      |
+| autoplan                                  | [Autoplan](#autoplan) | none        | no       | A custom autoplan configuration. If not specified, the default autoplan config will be used. See [Autoplanning](autoplanning.md).                                                                                                         |
+| terraform_version | string | none | no | A specific Terraform version to use when running commands for this project. Must be [Semver compatible](https://semver.org/), ex. `v0.11.0`, `0.12.0-beta1`. |
+| plan_requirements *(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis plan` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. |
+| apply_requirements *(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. |
+| import_requirements *(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. |
+| workflow *(restricted)* | string | none | no | A custom workflow. If not specified, Atlantis will use its default workflow. |
::: tip
A project represents a Terraform state. Typically, there is one state per directory and workspace however it's possible to
have multiple states in the same directory using `terraform init -backend-config=custom-config.tfvars`.
-Atlantis supports this but requires the `name` key to be specified. See [Custom Backend Config](custom-workflows.html#custom-backend-config) for more details.
+Atlantis supports this but requires the `name` key to be specified. See [Custom Backend Config](custom-workflows.md#custom-backend-config) for more details.
:::
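A sketch of that multi-state layout, assuming two hypothetical states sharing one directory (each name would pair with a workflow that passes its own `-backend-config`):

```yaml
projects:
- name: project1-staging       # name is required because dir is shared
  dir: project1
- name: project1-production
  dir: project1
```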
### Autoplan
@@ -427,7 +427,7 @@ Atlantis supports this but requires the `name` key to be specified. See [Custom
enabled: true
when_modified: ["*.tf", "terragrunt.hcl", ".terraform.lock.hcl"]
```
-| Key | Type | Default | Required | Description |
-|-----------------------|---------------|----------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| enabled | boolean | `true` | no | Whether autoplanning is enabled for this project. |
-| when_modified | array[string] | `["**/*.tf*"]` | no | Uses [.dockerignore](https://docs.docker.com/engine/reference/builder/#dockerignore-file) syntax. If any modified file in the pull request matches, this project will be planned. See [Autoplanning](autoplanning.html). Paths are relative to the project's dir. |
+| Key | Type | Default | Required | Description |
+|-----------------------|---------------|----------------|----------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| enabled | boolean | `true` | no | Whether autoplanning is enabled for this project. |
+| when_modified | array[string] | `["**/*.tf*"]` | no | Uses [.dockerignore](https://docs.docker.com/engine/reference/builder/#dockerignore-file) syntax. If any modified file in the pull request matches, this project will be planned. See [Autoplanning](autoplanning.md). Paths are relative to the project's dir. |
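Putting the keys above in context, a sketch of a project with a custom autoplan block (the directory is hypothetical):

```yaml
version: 3
projects:
- dir: project1
  autoplan:
    enabled: true
    when_modified: ["*.tf", "terragrunt.hcl", ".terraform.lock.hcl"]
```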
diff --git a/runatlantis.io/docs/requirements.md b/runatlantis.io/docs/requirements.md
index e300e63fe7..3eaf9552f1 100644
--- a/runatlantis.io/docs/requirements.md
+++ b/runatlantis.io/docs/requirements.md
@@ -9,6 +9,7 @@ Atlantis integrates with the following Git hosts:
* GitHub (public, private or enterprise)
* GitLab (public, private or enterprise)
+* Gitea (public, private and compatible forks like Forgejo)
* Bitbucket Cloud aka bitbucket.org (public or private)
* Bitbucket Server aka Stash
* Azure DevOps
@@ -56,14 +57,14 @@ Atlantis supports any Terraform repository structure, for example:
└── ...
```
With modules, if you want `project1` automatically planned when `module1` is modified
-you need to create an `atlantis.yaml` file. See [atlantis.yaml Use Cases](repo-level-atlantis-yaml.html#configuring-planning) for more details.
+you need to create an `atlantis.yaml` file. See [atlantis.yaml Use Cases](repo-level-atlantis-yaml.md#configuring-planning) for more details.
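As a sketch, such an `atlantis.yaml` widens `when_modified` so module edits trigger a plan of the project; this assumes the modules live in a sibling `modules/` directory (paths are relative to the project's `dir`):

```yaml
version: 3
projects:
- dir: project1
  autoplan:
    when_modified: ["../modules/**/*.tf", "**/*.tf*"]
```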
### Terraform Workspaces
*See [Terraform's docs](https://developer.hashicorp.com/terraform/language/state/workspaces) if you are unfamiliar with workspaces.*
If you're using Terraform `>= 0.9.0`, Atlantis supports workspaces through an
`atlantis.yaml` file that tells Atlantis the names of your workspaces
-(see [atlantis.yaml Use Cases](repo-level-atlantis-yaml.html#supporting-terraform-workspaces) for more details)
+(see [atlantis.yaml Use Cases](repo-level-atlantis-yaml.md#supporting-terraform-workspaces) for more details)
### .tfvars Files
```
@@ -74,7 +75,7 @@ If you're using Terraform `>= 0.9.0`, Atlantis supports workspaces through an
```
For Atlantis to be able to plan automatically with `.tfvars files`, you need to create
an `atlantis.yaml` file to tell it to use `-var-file={YOUR_FILE}`.
-See [atlantis.yaml Use Cases](custom-workflows.html#tfvars-files) for more details.
+See [atlantis.yaml Use Cases](custom-workflows.md#tfvars-files) for more details.
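The linked use case boils down to a custom workflow passing `-var-file`; a sketch, assuming a `staging.tfvars` file and a server-side config that allows custom workflows:

```yaml
version: 3
projects:
- dir: project1
  workflow: staging
workflows:
  staging:
    plan:
      steps:
      - init
      - plan:
          extra_args: ["-var-file", "staging.tfvars"]
```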
### Multiple Repos
Atlantis supports multiple repos as well, as long as there is a webhook configured
@@ -82,8 +83,8 @@ for each repo.
## Terraform Versions
Atlantis supports all Terraform versions (including 0.12) and can be configured
-to use different versions for different repositories/projects. See [Terraform Versions](terraform-versions.html).
+to use different versions for different repositories/projects. See [Terraform Versions](terraform-versions.md).
## Next Steps
* If your Terraform setup meets the Atlantis requirements, continue the installation
- guide and set up your [Git Host Access Credentials](access-credentials.html)
+ guide and set up your [Git Host Access Credentials](access-credentials.md)
diff --git a/runatlantis.io/docs/security.md b/runatlantis.io/docs/security.md
index a6bafda5a8..959f386764 100644
--- a/runatlantis.io/docs/security.md
+++ b/runatlantis.io/docs/security.md
@@ -60,7 +60,7 @@ or by specifying a malicious provider. This code could then exfiltrate your cred
To prevent this, you could:
1. Bake providers into the Atlantis image or host and deny egress in production.
1. Implement the provider registry protocol internally and deny public egress; that way you control who has write access to the registry.
-1. Modify your [server-side repo configuration](https://www.runatlantis.io/docs/server-side-repo-config.html)'s `plan` step to validate against the
+1. Modify your [server-side repo configuration](server-side-repo-config.md)'s `plan` step to validate against the
use of disallowed providers, data sources, or PRs from unauthorized users. You could also add extra validation at this point, e.g.
requiring a "thumbs-up" on the PR before allowing the `plan` to continue. Conftest could be of use here.
diff --git a/runatlantis.io/docs/server-configuration.md b/runatlantis.io/docs/server-configuration.md
index e2722f5478..87892c3dd2 100644
--- a/runatlantis.io/docs/server-configuration.md
+++ b/runatlantis.io/docs/server-configuration.md
@@ -90,7 +90,7 @@ Values are chosen in this order:
# or (recommended)
ATLANTIS_API_SECRET="secret"
```
- Required secret used to validate requests made to the [`/api/*` endpoints](api-endpoints.html).
+ Required secret used to validate requests made to the [`/api/*` endpoints](api-endpoints.md).
### `--atlantis-url`
```bash
@@ -129,7 +129,7 @@ Values are chosen in this order:
ATLANTIS_AUTOMERGE=true
```
Automatically merge pull requests after all plans have been successfully applied.
- Defaults to `false`. See [Automerging](automerging.html) for more details.
+ Defaults to `false`. See [Automerging](automerging.md) for more details.
### `--autoplan-file-list`
```bash
@@ -147,7 +147,7 @@ Values are chosen in this order:
* When not set, defaults to all `.tf`, `.tfvars`, `.tfvars.json`, `terragrunt.hcl` and `.terraform.lock.hcl` files
(`--autoplan-file-list='**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl,**/.terraform.lock.hcl'`).
* Setting `--autoplan-file-list` will override the defaults. You **must** add `**/*.tf` and other defaults if you want to include them.
- * A custom [Workflow](repo-level-atlantis-yaml.html#configuring-planning) that uses autoplan `when_modified` will ignore this value.
+ * A custom [Workflow](repo-level-atlantis-yaml.md#configuring-planning) that uses autoplan `when_modified` will ignore this value.
Examples:
* Autoplan when any `*.tf` or `*.tfvars` file is modified.
@@ -298,7 +298,7 @@ and set `--autoplan-modules` to `false`.
ATLANTIS_BITBUCKET_WEBHOOK_SECRET="secret"
```
Secret used to validate Bitbucket webhooks. Only Bitbucket Server supports webhook secrets.
- For Bitbucket.org, see [Security](security.html#bitbucket-cloud-bitbucket-org) for mitigations.
+ For Bitbucket.org, see [Security](security.md#bitbucket-cloud-bitbucket-org) for mitigations.
::: warning SECURITY WARNING
If not specified, Atlantis won't be able to validate that the incoming webhook call came from Bitbucket.
@@ -312,7 +312,7 @@ and set `--autoplan-modules` to `false`.
ATLANTIS_CHECKOUT_DEPTH=0
```
The number of commits to fetch from the branch. Used if `--checkout-strategy=merge` since the `--checkout-strategy=branch` (default) checkout strategy always defaults to a shallow clone using a depth of 1.
- Defaults to `0`. See [Checkout Strategy](checkout-strategy.html) for more details.
+ Defaults to `0`. See [Checkout Strategy](checkout-strategy.md) for more details.
### `--checkout-strategy`
```bash
@@ -321,7 +321,7 @@ and set `--autoplan-modules` to `false`.
ATLANTIS_CHECKOUT_STRATEGY=""
```
How to check out pull requests. Use either `branch` or `merge`.
- Defaults to `branch`. See [Checkout Strategy](checkout-strategy.html) for more details.
+ Defaults to `branch`. See [Checkout Strategy](checkout-strategy.md) for more details.
### `--config`
```bash
@@ -339,7 +339,7 @@ and set `--autoplan-modules` to `false`.
```
Directory where Atlantis will store its data. Will be created if it doesn't exist.
Defaults to `~/.atlantis`. Atlantis will store its database, checked out repos, Terraform plans and downloaded
- Terraform binaries here. If Atlantis loses this directory, [locks](locking.html)
+ Terraform binaries here. If Atlantis loses this directory, [locks](locking.md)
will be lost and unapplied plans will be lost.
Note that the atlantis user is restricted to `~/.atlantis`.
@@ -352,7 +352,7 @@ and set `--autoplan-modules` to `false`.
ATLANTIS_DEFAULT_TF_VERSION="v0.12.31"
```
Terraform version to default to. Will download to `/bin/terraform`
- if not in `PATH`. See [Terraform Versions](terraform-versions.html) for more details.
+ if not in `PATH`. See [Terraform Versions](terraform-versions.md) for more details.
### `--disable-apply-all`
```bash
@@ -430,7 +430,7 @@ and set `--autoplan-modules` to `false`.
# or
ATLANTIS_ENABLE_POLICY_CHECKS=true
```
- Enables atlantis to run server side policies on the result of a terraform plan. Policies are defined in [server side repo config](https://www.runatlantis.io/docs/server-side-repo-config.html#reference).
+ Enables atlantis to run server side policies on the result of a terraform plan. Policies are defined in [server side repo config](server-side-repo-config.md#reference).
### `--enable-regexp-cmd`
```bash
@@ -442,7 +442,7 @@ and set `--autoplan-modules` to `false`.
This can be used to run all defined projects (with the `name` key) in `atlantis.yaml` using `atlantis plan -p .*`.
- The flag will only allow the regexes listed in the [`allowed_regexp_prefixes`](https://www.runatlantis.io/docs/repo-level-atlantis-yaml.html#reference) key defined in the repo `atlantis.yaml` file. If the key is undefined, its value defaults to `[]` which will allow any regex.
+ The flag will only allow the regexes listed in the [`allowed_regexp_prefixes`](repo-level-atlantis-yaml.md#reference) key defined in the repo `atlantis.yaml` file. If the key is undefined, its value defaults to `[]` which will allow any regex.
This will not work with `-d` yet, and to use `-p`, the repo projects must be defined in the repo `atlantis.yaml` file.
@@ -472,6 +472,56 @@ and set `--autoplan-modules` to `false`.
Fail and do not run the requested Atlantis command if any of the pre workflow hooks error.
+### `--gitea-base-url`
+ ```bash
+ atlantis server --gitea-base-url="http://your-gitea.corp:7990/basepath"
+ # or
+ ATLANTIS_GITEA_BASE_URL="http://your-gitea.corp:7990/basepath"
+ ```
+ Base URL of the Gitea installation. Must include `http://` or `https://`. Defaults to `https://gitea.com` if left empty/absent.
+
+### `--gitea-token`
+ ```bash
+ atlantis server --gitea-token="token"
+ # or (recommended)
+ ATLANTIS_GITEA_TOKEN="token"
+ ```
+ Gitea app password of the API user.
+
+### `--gitea-user`
+ ```bash
+ atlantis server --gitea-user="myuser"
+ # or
+ ATLANTIS_GITEA_USER="myuser"
+ ```
+ Gitea username of the API user.
+
+### `--gitea-webhook-secret`
+ ```bash
+ atlantis server --gitea-webhook-secret="secret"
+ # or (recommended)
+ ATLANTIS_GITEA_WEBHOOK_SECRET="secret"
+ ```
+ Secret used to validate Gitea webhooks.
+
+ ::: warning SECURITY WARNING
+ If not specified, Atlantis won't be able to validate that the incoming webhook call came from Gitea.
+ This means that an attacker could spoof calls to Atlantis and cause it to perform malicious actions.
+ :::
+
+### `--gitea-page-size`
+ ```bash
+ atlantis server --gitea-page-size=30
+ # or
+ ATLANTIS_GITEA_PAGE_SIZE=30
+ ```
+ Number of items on a single page in Gitea paged responses.
+
+ ::: warning Configuration dependent
+ The default value conforms to the Gitea server's standard config setting `DEFAULT_PAGING_NUM`.
+ The highest valid value depends on the Gitea server's config setting `MAX_RESPONSE_ITEMS`.
+ :::
+
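Taken together, a sketch of a server start-up wired for Gitea (host, user, org, and the exported secrets are placeholders):

```bash
atlantis server \
  --gitea-base-url="https://gitea.corp.example.com" \
  --gitea-user="atlantis" \
  --gitea-token="$ATLANTIS_GITEA_TOKEN" \
  --gitea-webhook-secret="$ATLANTIS_GITEA_WEBHOOK_SECRET" \
  --gitea-page-size=30 \
  --repo-allowlist="gitea.corp.example.com/myorg/*"
```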
### `--gh-allow-mergeable-bypass-apply`
```bash
atlantis server --gh-allow-mergeable-bypass-apply
@@ -590,7 +640,7 @@ and set `--autoplan-modules` to `false`.
# or (recommended)
ATLANTIS_GH_WEBHOOK_SECRET="secret"
```
- Secret used to validate GitHub webhooks (see [https://developer.github.com/webhooks/securing/](https://docs.github.com/en/developers/webhooks-and-events/webhooks/securing-your-webhooks)).
+ Secret used to validate GitHub webhooks (see [GitHub: Validating webhook deliveries](https://docs.github.com/en/webhooks/using-webhooks/validating-webhook-deliveries)).
::: warning SECURITY WARNING
If not specified, Atlantis won't be able to validate that the incoming webhook call came from GitHub.
@@ -715,7 +765,7 @@ This is useful when you have many projects and want to keep the pull request cle
# or
ATLANTIS_PARALLEL_APPLY=true
```
- Whether to run apply operations in parallel. Defaults to `false`. Explicit declaration in [repo config](repo-level-atlantis-yaml.html#run-plans-and-applies-in-parallel) takes precedence.
+ Whether to run apply operations in parallel. Defaults to `false`. Explicit declaration in [repo config](repo-level-atlantis-yaml.md#run-plans-and-applies-in-parallel) takes precedence.
### `--parallel-plan`
```bash
@@ -723,7 +773,7 @@ This is useful when you have many projects and want to keep the pull request cle
# or
ATLANTIS_PARALLEL_PLAN=true
```
- Whether to run plan operations in parallel. Defaults to `false`. Explicit declaration in [repo config](repo-level-atlantis-yaml.html#run-plans-and-applies-in-parallel) takes precedence.
+ Whether to run plan operations in parallel. Defaults to `false`. Explicit declaration in [repo config](repo-level-atlantis-yaml.md#run-plans-and-applies-in-parallel) takes precedence.
### `--parallel-pool-size`
```bash
@@ -840,7 +890,7 @@ This is useful when you have many projects and want to keep the pull request cle
# or
ATLANTIS_REPO_CONFIG="path/to/repos.yaml"
```
- Path to a YAML server-side repo config file. See [Server Side Repo Config](server-side-repo-config.html).
+ Path to a YAML server-side repo config file. See [Server Side Repo Config](server-side-repo-config.md).
### `--repo-config-json`
```bash
@@ -849,10 +899,10 @@ This is useful when you have many projects and want to keep the pull request cle
ATLANTIS_REPO_CONFIG_JSON='{"repos":[{"id":"/.*/", "apply_requirements":["mergeable"]}]}'
```
Specify server-side repo config as a JSON string. Useful if you don't want to write a config file to disk.
- See [Server Side Repo Config](server-side-repo-config.html) for more details.
+ See [Server Side Repo Config](server-side-repo-config.md) for more details.
::: tip
- If specifying a [Workflow](custom-workflows.html#reference), [step](custom-workflows.html#step)'s
+ If specifying a [Workflow](custom-workflows.md#reference), [step](custom-workflows.md#step)'s
can be specified as follows:
```json
{
@@ -981,7 +1031,7 @@ This is useful when you have many projects and want to keep the pull request cle
# or
ATLANTIS_STATS_NAMESPACE="myatlantis"
```
- Namespace for emitting stats/metrics. See [stats](stats.html) section.
+ Namespace for emitting stats/metrics. See [stats](stats.md) section.
### `--tf-download`
```bash
@@ -1011,7 +1061,7 @@ Setting this to `false` can be useful in an air-gapped environment where a downl
ATLANTIS_TFE_HOSTNAME="my-terraform-enterprise.company.com"
```
Hostname of your Terraform Enterprise installation to be used in conjunction with
- `--tfe-token`. See [Terraform Cloud](terraform-cloud.html) for more details.
+ `--tfe-token`. See [Terraform Cloud](terraform-cloud.md) for more details.
If using Terraform Cloud (i.e. you don't have your own Terraform Enterprise installation)
no need to set since it defaults to `app.terraform.io`.
@@ -1021,7 +1071,7 @@ Setting this to `false` can be useful in an air-gapped environment where a downl
# or
ATLANTIS_TFE_LOCAL_EXECUTION_MODE=true
```
- Enable if you're using local execution mode (instead of TFE/C's remote execution mode). See [Terraform Cloud](terraform-cloud.html) for more details.
+ Enable if you're using local execution mode (instead of TFE/C's remote execution mode). See [Terraform Cloud](terraform-cloud.md) for more details.
### `--tfe-token`
```bash
@@ -1029,7 +1079,7 @@ Setting this to `false` can be useful in an air-gapped environment where a downl
# or (recommended)
ATLANTIS_TFE_TOKEN='xxx.atlasv1.yyy'
```
- A token for Terraform Cloud/Terraform Enterprise integration. See [Terraform Cloud](terraform-cloud.html) for more details.
+ A token for Terraform Cloud/Terraform Enterprise integration. See [Terraform Cloud](terraform-cloud.md) for more details.
### `--use-tf-plugin-cache`
```bash
diff --git a/runatlantis.io/docs/server-side-repo-config.md b/runatlantis.io/docs/server-side-repo-config.md
index 77b44be4fa..55b0346747 100644
--- a/runatlantis.io/docs/server-side-repo-config.md
+++ b/runatlantis.io/docs/server-side-repo-config.md
@@ -18,7 +18,7 @@ the `atlantis server` command via the `--repo-config` flag, ex. `--repo-config=p
If you don't wish to write a config file to disk, you can use the
`--repo-config-json` flag or `ATLANTIS_REPO_CONFIG_JSON` environment variable
-to specify your config as JSON. See [--repo-config-json](server-configuration.html#repo-config-json)
+to specify your config as JSON. See [--repo-config-json](server-configuration.md#repo-config-json)
for an example.
## Example Server Side Repo
@@ -138,7 +138,7 @@ repos:
import_requirements: [approved]
```
-See [Command Requirements](command-requirements.html) for more details.
+See [Command Requirements](command-requirements.md) for more details.
### Requiring PR Is "Mergeable" Before Apply or Import
If you want to require that all (or specific) repos must have pull requests
@@ -164,7 +164,7 @@ repos:
import_requirements: [mergeable]
```
-See [Command Requirements](command-requirements.html) for more details.
+See [Command Requirements](command-requirements.md) for more details.
### Repos Can Set Their Own Apply an applicable subcommand
If you want all (or specific) repos to be able to override the default apply requirements, use
@@ -222,7 +222,7 @@ repos:
- run: |
my bash script inline
```
-See [Pre Workflow Hooks](pre-workflow-hooks.html) for more details on writing
+See [Pre Workflow Hooks](pre-workflow-hooks.md) for more details on writing
pre workflow hooks.
### Running Scripts After Atlantis Workflows
@@ -237,7 +237,7 @@ repos:
- run: |
my bash script inline
```
-See [Post Workflow Hooks](post-workflow-hooks.html) for more details on writing
+See [Post Workflow Hooks](post-workflow-hooks.md) for more details on writing
post workflow hooks.
### Change The Default Atlantis Workflow
@@ -261,7 +261,7 @@ workflows:
- run: my custom apply command
```
-See [Custom Workflows](custom-workflows.html) for more details on writing
+See [Custom Workflows](custom-workflows.md) for more details on writing
custom workflows.
### Allow Repos To Choose A Server-Side Workflow
@@ -345,7 +345,7 @@ There is always a workflow named `default` that corresponds to Atlantis' default
unless you've created your own server-side workflow with that key (overriding it).
:::
-See [Custom Workflows](custom-workflows.html) for more details on writing
+See [Custom Workflows](custom-workflows.md) for more details on writing
custom workflows.
### Allow Using Custom Policy Tools
@@ -392,12 +392,12 @@ workflows:
- run: my custom apply command
```
-See [Custom Workflows](custom-workflows.html) for more details on writing
+See [Custom Workflows](custom-workflows.md) for more details on writing
custom workflows.
### Multiple Atlantis Servers Handle The Same Repository
Running multiple Atlantis servers to handle the same repository can be done to separate permissions for each Atlantis server.
-In this case, a different [atlantis.yaml](repo-level-atlantis-yaml.html) repository config file can be used by using different `repos.yaml` files.
+In this case, a different [atlantis.yaml](repo-level-atlantis-yaml.md) repository config file can be used by using different `repos.yaml` files.
For example, consider a situation where a separate `production-server` atlantis uses repo config `atlantis-production.yaml` and `staging-server` atlantis uses repo config `atlantis-staging.yaml`.
@@ -416,7 +416,7 @@ repos:
```
Then, create `atlantis-production.yaml` and `atlantis-staging.yaml` files in the repository.
-See the configuration examples in [atlantis.yaml](repo-level-atlantis-yaml.html).
+See the configuration examples in [atlantis.yaml](repo-level-atlantis-yaml.md).
```yaml
# atlantis-production.yaml
@@ -438,20 +438,20 @@ Now, 2 webhook URLs can be setup for the repository, which send events to `produ
Each server handles a different repository config file.
:::tip Notes
-* If `no projects` comments are annoying, set [--silence-no-projects](server-configuration.html#silence-no-projects).
-* The command trigger executable name can be reconfigured from `atlantis` to something else by setting [Executable Name](server-configuration.html#executable-name).
+* If `no projects` comments are annoying, set [--silence-no-projects](server-configuration.md#silence-no-projects).
+* The command trigger executable name can be reconfigured from `atlantis` to something else by setting [Executable Name](server-configuration.md#executable-name).
* When using different Atlantis server VCS users such as `@atlantis-staging`, the comment `@atlantis-staging plan` can be used instead of `atlantis plan` to call `staging-server` only.
:::
## Reference
### Top-Level Keys
-| Key | Type | Default | Required | Description |
-|-----------|---------------------------------------------------------|-----------|----------|---------------------------------------------------------------------------------------|
-| repos | array[[Repo](#repo)] | see below | no | List of repos to apply settings to. |
-| workflows | map[string: [Workflow](custom-workflows.html#workflow)] | see below | no | Map from workflow name to workflow. Workflows override the default Atlantis commands. |
-| policies | Policies. | none | no | List of policy sets to run and associated metadata |
-| metrics | Metrics. | none | no | Map of metric configuration |
+| Key | Type | Default | Required | Description |
+|-----------|-------------------------------------------------------|-----------|----------|---------------------------------------------------------------------------------------|
+| repos | array[[Repo](#repo)] | see below | no | List of repos to apply settings to. |
+| workflows | map[string: [Workflow](custom-workflows.md#workflow)] | see below | no | Map from workflow name to workflow. Workflows override the default Atlantis commands. |
+| policies | Policies. | none | no | List of policy sets to run and associated metadata |
+| metrics | Metrics. | none | no | Map of metric configuration |
::: tip A Note On Defaults
@@ -485,24 +485,23 @@ If you set a workflow with the key `default`, it will override this.
:::
### Repo
-| Key | Type | Default | Required | Description |
-|-------------------------------|----------|---------|----------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| id | string | none | yes | Value can be a regular expression when specified as /<regex>/ or an exact string match. Repo IDs are of the form `{vcs hostname}/{org}/{name}`, ex. `github.com/owner/repo`. Hostname is specified without scheme or port. For Bitbucket Server, {org} is the **name** of the project, not the key. |
-| branch | string | none | no | An regex matching pull requests by base branch (the branch the pull request is getting merged into). By default, all branches are matched |
-| repo_config_file | string | none | no | Repo config file path in this repo. By default, use `atlantis.yaml` which is located on repository root. When multiple atlantis servers work with the same repo, please set different file names. |
-| workflow | string | none | no | A custom workflow.
-| plan_requirements | []string | none | no | Requirements that must be satisfied before `atlantis plan` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | |
-| apply_requirements | []string | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. |
-| import_requirements | []string | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. |
-| allowed_overrides | []string | none | no | A list of restricted keys that `atlantis.yaml` files can override. The only supported keys are `apply_requirements`, `workflow`, `delete_source_branch_on_merge`,`repo_locking`, and `custom_policy_check` |
-| allowed_workflows | []string | none | no | A list of workflows that `atlantis.yaml` files can select from. |
-| allow_custom_workflows | bool | false | no | Whether or not to allow [Custom Workflows](custom-workflows.html). |
-| delete_source_branch_on_merge | bool | false | no | Whether or not to delete the source branch on merge. |
-| repo_locking | bool | false | no | Whether or not to get a lock. |
-| policy_check | bool | false | no | Whether or not to run policy checks on this repository. |
-| custom_policy_check | bool | false | no | Whether or not to enable custom policy check tools outside of Conftest on this repository. |
-| autodiscover | AutoDiscover | none | no | Auto discover settings for this repo
-
+| Key | Type | Default | Required | Description |
+|-------------------------------|--------------|---------|----------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| id | string | none | yes | Value can be a regular expression when specified as /<regex>/ or an exact string match. Repo IDs are of the form `{vcs hostname}/{org}/{name}`, ex. `github.com/owner/repo`. Hostname is specified without scheme or port. For Bitbucket Server, {org} is the **name** of the project, not the key. |
+| branch                        | string       | none    | no       | A regex matching pull requests by base branch (the branch the pull request is getting merged into). By default, all branches are matched.                                                                                                                                                                   |
+| repo_config_file              | string       | none    | no       | Repo config file path in this repo. By default, uses `atlantis.yaml` located at the repository root. When multiple Atlantis servers work with the same repo, set different file names.                                                                                                                      |
+| workflow | string | none | no | A custom workflow. |
+| plan_requirements             | []string     | none    | no       | Requirements that must be satisfied before `atlantis plan` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details.                                                                       |
+| apply_requirements | []string | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. |
+| import_requirements | []string | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. |
+| allowed_overrides             | []string     | none    | no       | A list of restricted keys that `atlantis.yaml` files can override. The only supported keys are `apply_requirements`, `workflow`, `delete_source_branch_on_merge`, `repo_locking`, and `custom_policy_check`.                                                                                                |
+| allowed_workflows | []string | none | no | A list of workflows that `atlantis.yaml` files can select from. |
+| allow_custom_workflows | bool | false | no | Whether or not to allow [Custom Workflows](custom-workflows.md). |
+| delete_source_branch_on_merge | bool | false | no | Whether or not to delete the source branch on merge. |
+| repo_locking | bool | false | no | Whether or not to get a lock. |
+| policy_check | bool | false | no | Whether or not to run policy checks on this repository. |
+| custom_policy_check | bool | false | no | Whether or not to enable custom policy check tools outside of Conftest on this repository. |
+| autodiscover | AutoDiscover | none | no | Auto discover settings for this repo |
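As a sketch, a server-side entry exercising several of these keys (the regexes and file name are illustrative):

```yaml
repos:
- id: /github.com\/myorg\/.*/
  branch: /main/
  apply_requirements: [approved, mergeable]
  allowed_overrides: [workflow, apply_requirements]
  repo_config_file: atlantis-production.yaml
```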
:::tip Notes
* If multiple repos match, the last match will apply.
diff --git a/runatlantis.io/docs/stats.md b/runatlantis.io/docs/stats.md
index a2980c5634..39ef9f88f0 100644
--- a/runatlantis.io/docs/stats.md
+++ b/runatlantis.io/docs/stats.md
@@ -8,11 +8,11 @@ Currently Statsd and Prometheus is supported. See configuration below for detail
## Configuration
-Metrics are configured through the [Server Side Config](server-side-repo-config.html#metrics).
+Metrics are configured through the [Server Side Config](server-side-repo-config.md#metrics).
## Available Metrics
-Assuming metrics are exposed from the endpoint `/metrics` from the [metrics](server-side-repo-config.html#metrics) server side config e.g.
+Assuming metrics are exposed at the `/metrics` endpoint via the [metrics](server-side-repo-config.md#metrics) server-side config, e.g.
```yaml
@@ -54,13 +54,13 @@ The output shown above is trimmed, since with every new version release this met
Important metrics to monitor are
-| Metric Name | Metric Type | Purpose |
-|------------------------------------------------|----------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------|
-| `atlantis_cmd_autoplan_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when [autoplan](autoplanning.html#autoplanning) has thrown error. |
-| `atlantis_cmd_comment_plan_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when on commenting `atlantis plan` has thrown error. |
-| `atlantis_cmd_autoplan_execution_success` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when [autoplan](autoplanning.html#autoplanning) has run successfully. |
-| `atlantis_cmd_comment_apply_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when on commenting `atlantis apply` has thrown error. |
-| `atlantis_cmd_comment_apply_execution_success` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when on commenting `atlantis apply` has run successfully. |
+| Metric Name | Metric Type | Purpose |
+|------------------------------------------------|----------------------------------------------------------------------|-------------------------------------------------------------------------------------|
+| `atlantis_cmd_autoplan_execution_error`        | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times [autoplan](autoplanning.md#autoplanning) has thrown an error.        |
+| `atlantis_cmd_comment_plan_execution_error`    | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times commenting `atlantis plan` has thrown an error.                      |
+| `atlantis_cmd_autoplan_execution_success`      | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times [autoplan](autoplanning.md#autoplanning) has run successfully.       |
+| `atlantis_cmd_comment_apply_execution_error`   | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times commenting `atlantis apply` has thrown an error.                     |
+| `atlantis_cmd_comment_apply_execution_success` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times commenting `atlantis apply` has run successfully.                    |
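If Prometheus is the backend, a minimal scrape-config sketch for these metrics (the target address is a placeholder; Atlantis listens on port 4141 by default):

```yaml
scrape_configs:
- job_name: atlantis
  metrics_path: /metrics
  static_configs:
  - targets: ["atlantis.example.com:4141"]
```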
::: tip NOTE
There are plenty of additional metrics exposed by atlantis that are not described above.
diff --git a/runatlantis.io/docs/terraform-versions.md b/runatlantis.io/docs/terraform-versions.md
index 79fdee0db3..b38cebf9d7 100644
--- a/runatlantis.io/docs/terraform-versions.md
+++ b/runatlantis.io/docs/terraform-versions.md
@@ -12,7 +12,7 @@ projects:
- dir: .
terraform_version: v1.1.5
```
-See [atlantis.yaml Use Cases](repo-level-atlantis-yaml.html#terraform-versions) for more details.
+See [atlantis.yaml Use Cases](repo-level-atlantis-yaml.md#terraform-versions) for more details.
## Via terraform config
Alternatively, one can use the terraform configuration block's `required_version` key to specify an exact version (`x.y.z` or `= x.y.z`), or as of [atlantis v0.21.0](https://github.com/runatlantis/atlantis/releases/tag/v0.21.0), a comparison or pessimistic [version constraint](https://developer.hashicorp.com/terraform/language/expressions/version-constraints#version-constraint-syntax):
@@ -44,7 +44,7 @@ See [Terraform `required_version`](https://developer.hashicorp.com/terraform/lan
::: tip NOTE
Atlantis will automatically download the latest version that fulfills the constraint specified.
-A `terraform_version` specified in the `atlantis.yaml` file takes precedence over both the [`--default-tf-version`](server-configuration.html#default-tf-version) flag and the `required_version` in the terraform hcl.
+A `terraform_version` specified in the `atlantis.yaml` file takes precedence over both the [`--default-tf-version`](server-configuration.md#default-tf-version) flag and the `required_version` in the terraform hcl.
:::
::: tip NOTE
diff --git a/runatlantis.io/docs/troubleshooting-https.md b/runatlantis.io/docs/troubleshooting-https.md
index 191a4b1242..dc4c7a0541 100644
--- a/runatlantis.io/docs/troubleshooting-https.md
+++ b/runatlantis.io/docs/troubleshooting-https.md
@@ -18,7 +18,7 @@ If you have this error when specifying a TLS cert with a key:
```
Check that the locally signed certificate authority is prepended to the self signed certificate.
-A good example is shown at [Seth Vargo terraform implementation of atlantis-on-gke](https://github.com/sethvargo/atlantis-on-gke/blob/master/terraform/tls.tf#L64)
+A good example is shown at [Seth Vargo terraform implementation of atlantis-on-gke](https://github.com/sethvargo/atlantis-on-gke/blob/master/terraform/tls.tf#L64-L84)
For Go specific TLS resources have a look at the repository by [denji called golang-tls](https://github.com/denji/golang-tls).
diff --git a/runatlantis.io/docs/upgrading-atlantis-yaml.md b/runatlantis.io/docs/upgrading-atlantis-yaml.md
index 1b8fe7aaa0..8b7f2cc384 100644
--- a/runatlantis.io/docs/upgrading-atlantis-yaml.md
+++ b/runatlantis.io/docs/upgrading-atlantis-yaml.md
@@ -3,7 +3,7 @@
## Upgrading From v2 To v3
Atlantis version `v0.7.0` introduced a new version 3 of `atlantis.yaml`.
-**If you're not using [custom `run` steps](custom-workflows.html#custom-run-command),
+**If you're not using [custom `run` steps](custom-workflows.md#custom-run-command),
then you can upgrade from `version: 2` to `version: 3` without any changes.**
**NOTE:** Version 2 **is not being deprecated** and there is no need to upgrade your version
diff --git a/runatlantis.io/docs/using-atlantis.md b/runatlantis.io/docs/using-atlantis.md
index 15a0b5a681..acbe7e27e4 100644
--- a/runatlantis.io/docs/using-atlantis.md
+++ b/runatlantis.io/docs/using-atlantis.md
@@ -6,7 +6,7 @@ Atlantis triggers commands via pull request comments.
::: tip
You can use the following executable names.
* `atlantis help`
- * `atlantis` is executable name. You can configure by [Executable Name](/docs/server-configuration.html#executable-name).
+ * `atlantis` is the executable name. You can configure it via [Executable Name](server-configuration.md#executable-name).
* `run help`
* `run` is a global executable name.
* `@GithubUser help`
@@ -62,7 +62,7 @@ atlantis plan -w staging
### Options
* `-d directory` Which directory to run plan in relative to root of repo. Use `.` for root.
* Ex. `atlantis plan -d child/dir`
-* `-p project` Which project to run plan for. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.html). Cannot be used at same time as `-d` or `-w` because the project defines this already.
+* `-p project` Which project to run plan for. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.md). Cannot be used at same time as `-d` or `-w` because the project defines this already.
* `-w workspace` Switch to this [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces) before planning. Defaults to `default`. Ignore this if Terraform workspaces are unused.
* `--verbose` Append Atlantis log to comment.
@@ -77,7 +77,7 @@ you can append them to the end of the comment after `--`, ex.
```
atlantis plan -d dir -- -var foo='bar'
```
-If you always need to append a certain flag, see [Custom Workflow Use Cases](custom-workflows.html#adding-extra-arguments-to-terraform-commands).
+If you always need to append a certain flag, see [Custom Workflow Use Cases](custom-workflows.md#adding-extra-arguments-to-terraform-commands).
### Using the -destroy Flag
@@ -124,9 +124,9 @@ atlantis apply -w staging
### Options
* `-d directory` Apply the plan for this directory, relative to root of repo. Use `.` for root.
-* `-p project` Apply the plan for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.html). Cannot be used at same time as `-d` or `-w`.
+* `-p project` Apply the plan for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.md). Cannot be used at same time as `-d` or `-w`.
* `-w workspace` Apply the plan for this [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces). Ignore this if Terraform workspaces are unused.
-* `--auto-merge-disabled` Disable [automerge](automerging.html) for this apply command.
+* `--auto-merge-disabled` Disable [automerge](automerging.md) for this apply command.
* `--verbose` Append Atlantis log to comment.
### Additional Terraform flags
@@ -148,7 +148,7 @@ atlantis import [options] ADDRESS ID -- [terraform import flags]
Runs `terraform import` that matches the directory/project/workspace.
This command discards the terraform plan result. After an import and before an apply, another `atlantis plan` must be run again.
-To allow the `import` command requires [--allow-commands](/docs/server-configuration.html#allow-commands) configuration.
+Allowing the `import` command requires the [--allow-commands](server-configuration.md#allow-commands) configuration.
### Examples
```bash
@@ -172,7 +172,7 @@ atlantis import -w staging ADDRESS ID
### Options
* `-d directory` Import a resource for this directory, relative to root of repo. Use `.` for root.
-* `-p project` Import a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml`](repo-level-atlantis-yaml.html) repo configuration file. This cannot be used at the same time as `-d` or `-w`.
+* `-p project` Import a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml`](repo-level-atlantis-yaml.md) repo configuration file. This cannot be used at the same time as `-d` or `-w`.
* `-w workspace` Import a resource for a specific [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces). Ignore this if Terraform workspaces are unused.
### Additional Terraform flags
@@ -182,7 +182,7 @@ append them to the end of the comment after `--`, e.g.
```
atlantis import -d dir 'aws_instance.example["foo"]' i-1234567890abcdef0 -- -var foo='bar'
```
-If a flag is needed to be always appended, see [Custom Workflow Use Cases](custom-workflows.html#adding-extra-arguments-to-terraform-commands).
+If a flag is needed to be always appended, see [Custom Workflow Use Cases](custom-workflows.md#adding-extra-arguments-to-terraform-commands).
---
## atlantis state rm
@@ -193,7 +193,7 @@ atlantis state [options] rm ADDRESS... -- [terraform state rm flags]
Runs `terraform state rm` that matches the directory/project/workspace.
This command discards the terraform plan result. After running `state rm` and before an apply, another `atlantis plan` must be run.
-To allow the `state` command requires [--allow-commands](/docs/server-configuration.html#allow-commands) configuration.
+Allowing the `state` command requires the [--allow-commands](server-configuration.md#allow-commands) configuration.
### Examples
```bash
@@ -217,7 +217,7 @@ atlantis state -w staging rm ADDRESS
### Options
* `-d directory` Run state rm a resource for this directory, relative to root of repo. Use `.` for root.
-* `-p project` Run state rm a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml`](repo-level-atlantis-yaml.html) repo configuration file. This cannot be used at the same time as `-d` or `-w`.
+* `-p project` Run state rm a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml`](repo-level-atlantis-yaml.md) repo configuration file. This cannot be used at the same time as `-d` or `-w`.
* `-w workspace` Run state rm a resource for a specific [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces). Ignore this if Terraform workspaces are unused.
### Additional Terraform flags
@@ -227,7 +227,7 @@ append them to the end of the comment after `--`, e.g.
```
atlantis state -d dir rm 'aws_instance.example["foo"]' -- -lock=false
```
-If a flag is needed to be always appended, see [Custom Workflow Use Cases](custom-workflows.html#adding-extra-arguments-to-terraform-commands).
+If a flag is needed to be always appended, see [Custom Workflow Use Cases](custom-workflows.md#adding-extra-arguments-to-terraform-commands).
---
## atlantis unlock
@@ -248,7 +248,7 @@ atlantis approve_policies
### Explanation
Approves all current policy checking failures for the PR.
-See also [policy checking](/docs/policy-checking.html).
+See also [policy checking](policy-checking.md).
### Options
* `--verbose` Append Atlantis log to comment.
diff --git a/runatlantis.io/docs/using-slack-hooks.md b/runatlantis.io/docs/using-slack-hooks.md
index c75c243fca..78955a5100 100644
--- a/runatlantis.io/docs/using-slack-hooks.md
+++ b/runatlantis.io/docs/using-slack-hooks.md
@@ -13,7 +13,7 @@ For this you'll need to:
## Configuring Slack for Atlantis
-* Go to [https://api.slack.com/apps](https://api.slack.com/apps)
+* Go to [Slack: Apps](https://api.slack.com/apps)
* Click the `Create New App` button
* Select `From scratch` in the dialog that opens
* Give it a name, e.g. `atlantis-bot`.
diff --git a/runatlantis.io/docs/webhook-secrets.md b/runatlantis.io/docs/webhook-secrets.md
index 8b66ee8276..050f548a83 100644
--- a/runatlantis.io/docs/webhook-secrets.md
+++ b/runatlantis.io/docs/webhook-secrets.md
@@ -17,12 +17,12 @@ Azure DevOps uses Basic authentication for webhooks rather than webhook secrets.
:::
::: tip NOTE
-An app-wide token is generated during [GitHub App setup](access-credentials.html#github-app). You can recover it by navigating to the [GitHub app settings page](https://github.com/settings/apps) and selecting "Edit" next to your Atlantis app's name. Token appears after clicking "Edit" under the Webhook header.
+An app-wide token is generated during [GitHub App setup](access-credentials.md#github-app). You can recover it by navigating to the [GitHub app settings page](https://github.com/settings/apps) and selecting "Edit" next to your Atlantis app's name. The token appears under the Webhook header after clicking "Edit".
:::
::: warning
Bitbucket.org **does not** support webhook secrets.
-To mitigate, use repo allowlists and IP allowlists. See [Security](security.html#bitbucket-cloud-bitbucket-org) for more information.
+To mitigate, use repo allowlists and IP allowlists. See [Security](security.md#bitbucket-cloud-bitbucket-org) for more information.
:::
## Generating A Webhook Secret
@@ -30,7 +30,7 @@ You can use any random string generator to create your Webhook secret. It should
For example:
* Generate via Ruby with `ruby -rsecurerandom -e 'puts SecureRandom.hex(32)'`
-* Generate online with [https://www.browserling.com/tools/random-string](https://www.browserling.com/tools/random-string)
+* Generate online with [browserling: Generate Random Strings and Numbers](https://www.browserling.com/tools/random-string)
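* Generate via OpenSSL with `openssl rand -hex 32` (an equivalent alternative if Ruby isn't available)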
::: tip NOTE
You must use **the same** webhook secret for each repo.
@@ -38,6 +38,6 @@ You must use **the same** webhook secret for each repo.
## Next Steps
* Record your secret
-* You'll be using it later to [configure your webhooks](configuring-webhooks.html), however if you're
-following the [Installation Guide](installation-guide.html) then your next step is to
-[Deploy Atlantis](deployment.html)
+* You'll be using it later to [configure your webhooks](configuring-webhooks.md); however, if you're
+following the [Installation Guide](installation-guide.md) then your next step is to
+[Deploy Atlantis](deployment.md)
diff --git a/runatlantis.io/guide/README.md b/runatlantis.io/guide/README.md
index 15472518b8..0b163636b4 100644
--- a/runatlantis.io/guide/README.md
+++ b/runatlantis.io/guide/README.md
@@ -1,12 +1,12 @@
# Introduction
## Getting Started
-* If you'd like to just test out running Atlantis on an **example repo** check out the [Test Drive](test-drive.html).
-* If you'd like to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.html).
-* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](/docs/installation-guide.html).
+* If you'd like to just test out running Atlantis on an **example repo** check out the [Test Drive](test-drive.md).
+* If you'd like to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.md).
+* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](../docs/installation-guide.md).
::: tip Looking for the full docs?
-Go here: [www.runatlantis.io/docs](/docs/)
+Go here: [www.runatlantis.io/docs](../docs/README.md)
:::
## Overview – What Is Atlantis?
@@ -55,6 +55,6 @@ The exact commands that Atlantis runs are configurable. You can run custom scrip
to construct your ideal workflow.
## Next Steps
-* If you'd like to just test out running Atlantis on an **example repo** check out the [Test Drive](test-drive.html).
-* If you'd like to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.html).
-* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](/docs/installation-guide.html).
+* If you'd like to just test out running Atlantis on an **example repo** check out the [Test Drive](test-drive.md).
+* If you'd like to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.md).
+* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](../docs/installation-guide.md).
diff --git a/runatlantis.io/guide/test-drive.md b/runatlantis.io/guide/test-drive.md
index 22e8c77f21..08970ea110 100644
--- a/runatlantis.io/guide/test-drive.md
+++ b/runatlantis.io/guide/test-drive.md
@@ -1,6 +1,6 @@
# Test Drive
-To test drive Atlantis on an example repo, download the latest release:
-[https://github.com/runatlantis/atlantis/releases](https://github.com/runatlantis/atlantis/releases)
+To test drive Atlantis on an example repo, download the latest release from
+[GitHub](https://github.com/runatlantis/atlantis/releases).
Once you've extracted the archive, run:
```bash
@@ -14,5 +14,5 @@ This mode sets up Atlantis on a test repo so you can try it out. It will
- Start Atlantis so you can execute commands on the pull request
## Next Steps
-* If you're ready to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.html).
-* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](/docs/installation-guide.html).
+* If you're ready to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.md).
+* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](../docs/installation-guide.md).
diff --git a/runatlantis.io/guide/testing-locally.md b/runatlantis.io/guide/testing-locally.md
index 054b0d9c2a..efe5234a3e 100644
--- a/runatlantis.io/guide/testing-locally.md
+++ b/runatlantis.io/guide/testing-locally.md
@@ -3,7 +3,7 @@ These instructions are for running Atlantis **locally on your own computer** so
your own repositories before deciding whether to install it more permanently.
::: tip
-If you want to set up a production-ready Atlantis installation, read [Deployment](../docs/deployment.html).
+If you want to set up a production-ready Atlantis installation, read [Deployment](../docs/deployment.md).
:::
Steps:
@@ -12,13 +12,13 @@ Steps:
## Install Terraform
`terraform` needs to be in the `$PATH` for Atlantis.
-Download from [https://developer.hashicorp.com/terraform/downloads](https://developer.hashicorp.com/terraform/downloads)
+Download from [Terraform](https://developer.hashicorp.com/terraform/downloads)
```
unzip path/to/terraform_*.zip -d /usr/local/bin
```
## Download Atlantis
-Get the latest release from [https://github.com/runatlantis/atlantis/releases](https://github.com/runatlantis/atlantis/releases)
+Get the latest release from [GitHub](https://github.com/runatlantis/atlantis/releases)
and unpack it.
## Download Ngrok
@@ -26,7 +26,7 @@ Atlantis needs to be accessible somewhere that github.com/gitlab.com/bitbucket.o
One way to accomplish this is with ngrok, a tool that forwards your local port to a random
public hostname.
-Go to [https://ngrok.com/download](https://ngrok.com/download), download ngrok and `unzip` it.
+[Download](https://ngrok.com/download) ngrok and `unzip` it.
Start `ngrok` on port `4141` and take note of the hostname it gives you:
```bash
@@ -47,7 +47,7 @@ Bitbucket Cloud (bitbucket.org) doesn't use webhook secrets so if you're using B
When you're ready to do a production deploy of Atlantis you should allowlist [Bitbucket IPs](https://confluence.atlassian.com/bitbucket/what-are-the-bitbucket-cloud-ip-addresses-i-should-use-to-configure-my-corporate-firewall-343343385.html)
to ensure the webhooks are coming from them.
:::
-Create a random string of any length (you can use [https://www.random.org/strings/](https://www.random.org/strings/))
+Create a random string of any length (you can use [random.org](https://www.random.org/strings/))
and set an environment variable:
```
SECRET="{YOUR_RANDOM_STRING}"
@@ -140,6 +140,36 @@ Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bi
+### Gitea Webhook
+<details>
+    <summary>Expand</summary>
+
+- Click **Settings > Webhooks** in the top bar and then in the sidebar
+- Click **Add webhook > Gitea** (Gitea webhooks are service specific, but this works)
+- set **Target URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`**
+- double-check you added `/events` to the end of your URL.
+- set **Secret** to the Webhook Secret you generated previously
+  - **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret.
+- Select **Custom Events...**
+- Check the boxes
+  - **Repository events > Push**
+  - **Issue events > Issue Comment**
+  - **Pull Request events > Pull Request**
+  - **Pull Request events > Pull Request Comment**
+  - **Pull Request events > Pull Request Reviewed**
+  - **Pull Request events > Pull Request Synchronized**
+- Leave **Active** checked
+- Click **Add Webhook**
+- See [Next Steps](#next-steps)
+
+</details>
## Create an access token for Atlantis
We recommend using a dedicated CI user or creating a new user named **@atlantis** that performs all API actions; however, for testing,
@@ -155,7 +185,7 @@ TOKEN="{YOUR_TOKEN}"
```
### GitLab or GitLab Enterprise Access Token
-- follow [https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token)
+- follow [GitLab: Create a personal access token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token)
- create a token with **api** scope
- set the token as an environment variable
```
@@ -163,7 +193,7 @@ TOKEN="{YOUR_TOKEN}"
```
### Bitbucket Cloud (bitbucket.org) Access Token
-- follow [https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/](https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/)
+- follow [Bitbucket Cloud: Create an app password](https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/)
- Label the password "atlantis"
- Select **Pull requests**: **Read** and **Write** so that Atlantis can read your pull requests and write comments to them
- set the token as an environment variable
@@ -183,6 +213,13 @@ TOKEN="{YOUR_TOKEN}"
TOKEN="{YOUR_TOKEN}"
```
+### Gitea Access Token
+- Go to "Profile and Settings" > "Settings" in Gitea (top-right)
+- Go to "Applications" under "User Settings" in Gitea
+- Create a token under the "Manage Access Tokens" section with the following permissions:
+ - issue: Read and Write
+ - repository: Read and Write
+- Record the access token
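+- set the token as an environment variable (as with the other providers above)
+```
+TOKEN="{YOUR_TOKEN}"
+```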
## Start Atlantis
You're almost ready to start Atlantis; just set two more variables:
@@ -278,6 +315,21 @@ atlantis server \
--ssl-key-file=file.key
```
+### Gitea
+
+```bash
+atlantis server \
+--atlantis-url="$URL" \
+--gitea-user="$ATLANTIS_GITEA_USER" \
+--gitea-token="$ATLANTIS_GITEA_TOKEN" \
+--gitea-webhook-secret="$ATLANTIS_GITEA_WEBHOOK_SECRET" \
+--gitea-base-url="$ATLANTIS_GITEA_BASE_URL" \
+--gitea-page-size="$ATLANTIS_GITEA_PAGE_SIZE" \
+--repo-allowlist="$REPO_ALLOWLIST" \
+--ssl-cert-file=file.crt \
+--ssl-key-file=file.key
+```
+
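+The `ATLANTIS_GITEA_*` variables above hold the values gathered in the earlier steps. A minimal sketch of setting them (every value here is illustrative, including the page size):
+
+```bash
+URL="https://{YOUR_HOSTNAME}.ngrok.io"               # from the ngrok step
+ATLANTIS_GITEA_USER="atlantis"                       # your Gitea user
+ATLANTIS_GITEA_TOKEN="$TOKEN"                        # from the access-token step
+ATLANTIS_GITEA_WEBHOOK_SECRET="$SECRET"              # from the webhook-secret step
+ATLANTIS_GITEA_BASE_URL="https://gitea.example.com"  # your Gitea instance
+ATLANTIS_GITEA_PAGE_SIZE=30                          # assumed API page size
+REPO_ALLOWLIST="gitea.example.com/{YOUR_USER}/{YOUR_REPO}"
+```
+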
## Create a pull request
Create a pull request so you can test Atlantis.
::: tip
@@ -293,7 +345,7 @@ You should see Atlantis logging about receiving the webhook and you should see t
Atlantis tries to figure out the directory to plan in based on the files modified.
If you need to customize the directories that Atlantis runs in, or the commands it runs when you're using workspaces
-or `.tfvars` files, see [atlantis.yaml Reference](/docs/repo-level-atlantis-yaml.html#reference).
+or `.tfvars` files, see [atlantis.yaml Reference](../docs/repo-level-atlantis-yaml.md#reference).
### Manual Plan
To manually `plan` in a specific directory or workspace, comment on the pull request using the `-d` or `-w` flags:
@@ -312,12 +364,12 @@ If you'd like to `apply`, type a comment: `atlantis apply`. You can use the `-d`
Atlantis at a specific plan. Otherwise it tries to apply the plan for the root directory.
## Real-time logs
-The [real-time terraform output](/docs/streaming-logs.md) for your command can be found by clicking into the status check for a given project in a PR which
+The [real-time terraform output](../docs/streaming-logs.md) for your command can be found by clicking into the status check for a given project in a PR which
links to the log-streaming UI. This is a terminal UI where you can view your commands executing in real-time.
## Next Steps
* If things are working as expected you can `Ctrl-C` the `atlantis server` command and the `ngrok` command.
-* Hopefully Atlantis is working with your repo and you're ready to move on to a [production-ready deployment](../docs/deployment.html).
+* Hopefully Atlantis is working with your repo and you're ready to move on to a [production-ready deployment](../docs/deployment.md).
* If it's not working as expected, you may need to customize how Atlantis runs with an `atlantis.yaml` file.
-See [atlantis.yaml use cases](/docs/repo-level-atlantis-yaml.html#use-cases).
-* Check out our [full documentation](../docs/) for more details.
+See [atlantis.yaml use cases](../docs/repo-level-atlantis-yaml.md#use-cases).
+* Check out our [full documentation](../docs/README.md) for more details.
diff --git a/scripts/download-release.sh b/scripts/download-release.sh
new file mode 100755
index 0000000000..9b3ea574d3
--- /dev/null
+++ b/scripts/download-release.sh
@@ -0,0 +1,33 @@
+#!/bin/sh
+set -e
+COMMAND_NAME=${1:-terraform}
+TARGETPLATFORM=${2:-"linux/amd64"}
+DEFAULT_VERSION=${3:-"1.6.2"}
+AVAILABLE_VERSIONS=${4:-"1.6.2"}
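+# Usage: download-release.sh <terraform|tofu> <target platform> <default version> <available versions>
+#   e.g. download-release.sh terraform linux/amd64 1.7.2 "1.6.6 1.7.2"   (versions here are illustrative)
+# Each requested version is downloaded, checksum-verified, unpacked into a
+# per-version directory, and symlinked as <command><version>; the default
+# version is also symlinked as the bare command name.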
+case "${TARGETPLATFORM}" in
+ "linux/amd64") ARCH=amd64 ;;
+ "linux/arm64") ARCH=arm64 ;;
+ "linux/arm/v7") ARCH=arm ;;
+ *) echo "ERROR: 'TARGETPLATFORM' value unexpected: ${TARGETPLATFORM}"; exit 1 ;;
+esac
+for VERSION in ${AVAILABLE_VERSIONS}; do
+ case "${COMMAND_NAME}" in
+ "terraform")
+ DOWNLOAD_URL_FORMAT=$(printf 'https://releases.hashicorp.com/terraform/%s/%s_%s' "$VERSION" "$COMMAND_NAME" "$VERSION")
+ COMMAND_DIR=/usr/local/bin/tf
+ ;;
+ "tofu")
+ DOWNLOAD_URL_FORMAT=$(printf 'https://github.com/opentofu/opentofu/releases/download/v%s/%s_%s' "$VERSION" "$COMMAND_NAME" "$VERSION")
+ COMMAND_DIR=/usr/local/bin/opentofu
+ ;;
+ *) echo "ERROR: 'COMMAND_NAME' value unexpected: ${COMMAND_NAME}"; exit 1 ;;
+ esac
+ curl -LOs "${DOWNLOAD_URL_FORMAT}_linux_${ARCH}.zip"
+ curl -LOs "${DOWNLOAD_URL_FORMAT}_SHA256SUMS"
+ sed -n "/${COMMAND_NAME}_${VERSION}_linux_${ARCH}.zip/p" "${COMMAND_NAME}_${VERSION}_SHA256SUMS" | sha256sum -c
+ mkdir -p "${COMMAND_DIR}/${VERSION}"
+ unzip "${COMMAND_NAME}_${VERSION}_linux_${ARCH}.zip" -d "${COMMAND_DIR}/${VERSION}"
+ ln -s "${COMMAND_DIR}/${VERSION}/${COMMAND_NAME}" "${COMMAND_NAME}${VERSION}"
+ rm "${COMMAND_NAME}_${VERSION}_linux_${ARCH}.zip"
+ rm "${COMMAND_NAME}_${VERSION}_SHA256SUMS"
+done
+ln -s "${COMMAND_DIR}/${DEFAULT_VERSION}/${COMMAND_NAME}" "${COMMAND_NAME}"
diff --git a/server/controllers/events/events_controller.go b/server/controllers/events/events_controller.go
index 2246a8f48b..91a7bf2592 100644
--- a/server/controllers/events/events_controller.go
+++ b/server/controllers/events/events_controller.go
@@ -14,6 +14,7 @@
package events
import (
+ "encoding/json"
"fmt"
"io"
"net/http"
@@ -28,6 +29,7 @@ import (
"github.com/runatlantis/atlantis/server/events/vcs"
"github.com/runatlantis/atlantis/server/events/vcs/bitbucketcloud"
"github.com/runatlantis/atlantis/server/events/vcs/bitbucketserver"
+ "github.com/runatlantis/atlantis/server/events/vcs/gitea"
"github.com/runatlantis/atlantis/server/logging"
tally "github.com/uber-go/tally/v4"
gitlab "github.com/xanzy/go-gitlab"
@@ -37,6 +39,11 @@ const githubHeader = "X-Github-Event"
const gitlabHeader = "X-Gitlab-Event"
const azuredevopsHeader = "Request-Id"
+const giteaHeader = "X-Gitea-Event"
+const giteaEventTypeHeader = "X-Gitea-Event-Type"
+const giteaSignatureHeader = "X-Gitea-Signature"
+const giteaRequestIDHeader = "X-Gitea-Delivery"
+
// bitbucketEventTypeHeader is the same in both cloud and server.
const bitbucketEventTypeHeader = "X-Event-Key"
const bitbucketCloudRequestIDHeader = "X-Request-UUID"
@@ -91,11 +98,20 @@ type VCSEventsController struct {
// Azure DevOps Team Project. If empty, no request validation is done.
AzureDevopsWebhookBasicPassword []byte
AzureDevopsRequestValidator AzureDevopsRequestValidator
+ GiteaWebhookSecret []byte
}
// Post handles POST webhook requests.
func (e *VCSEventsController) Post(w http.ResponseWriter, r *http.Request) {
- if r.Header.Get(githubHeader) != "" {
+ if r.Header.Get(giteaHeader) != "" {
+ if !e.supportsHost(models.Gitea) {
+ e.respond(w, logging.Debug, http.StatusBadRequest, "Ignoring request since not configured to support Gitea")
+ return
+ }
+ e.Logger.Debug("handling Gitea post")
+ e.handleGiteaPost(w, r)
+ return
+ } else if r.Header.Get(githubHeader) != "" {
if !e.supportsHost(models.Github) {
e.respond(w, logging.Debug, http.StatusBadRequest, "Ignoring request since not configured to support GitHub")
return
@@ -288,6 +304,91 @@ func (e *VCSEventsController) handleAzureDevopsPost(w http.ResponseWriter, r *ht
}
}
+func (e *VCSEventsController) handleGiteaPost(w http.ResponseWriter, r *http.Request) {
+ signature := r.Header.Get(giteaSignatureHeader)
+ eventType := r.Header.Get(giteaEventTypeHeader)
+ reqID := r.Header.Get(giteaRequestIDHeader)
+
+ defer r.Body.Close() // Ensure the request body is closed
+
+ body, err := io.ReadAll(r.Body)
+ if err != nil {
+		e.respond(w, logging.Error, http.StatusBadRequest, "Unable to read body: %s %s=%s", err, giteaRequestIDHeader, reqID)
+ return
+ }
+
+ if len(e.GiteaWebhookSecret) > 0 {
+ if err := gitea.ValidateSignature(body, signature, e.GiteaWebhookSecret); err != nil {
+ e.respond(w, logging.Warn, http.StatusBadRequest, errors.Wrap(err, "request did not pass validation").Error())
+ return
+ }
+ }
+
+ // Log the event type for debugging purposes
+ e.Logger.Debug("Received Gitea event %s with ID %s", eventType, reqID)
+
+ // Depending on the event type, handle the event appropriately
+ switch eventType {
+ case "pull_request_comment":
+ e.HandleGiteaPullRequestCommentEvent(w, body, reqID)
+ case "pull_request":
+ e.Logger.Debug("Handling as pull_request")
+ e.handleGiteaPullRequestEvent(w, body, reqID)
+ // Add other case handlers as necessary
+ default:
+		e.respond(w, logging.Debug, http.StatusOK, "Ignoring unsupported Gitea event type: %s %s=%s", eventType, giteaRequestIDHeader, reqID)
+ }
+}
+
+func (e *VCSEventsController) handleGiteaPullRequestEvent(w http.ResponseWriter, body []byte, reqID string) {
+ e.Logger.Debug("Entering handleGiteaPullRequestEvent")
+	// Attempt to unmarshal the incoming body into the Gitea webhook payload struct
+ var payload gitea.GiteaWebhookPayload
+ if err := json.Unmarshal(body, &payload); err != nil {
+ e.Logger.Err("Failed to unmarshal Gitea webhook payload: %v", err)
+ e.respond(w, logging.Error, http.StatusBadRequest, "Failed to parse request body")
+ return
+ }
+
+ e.Logger.Debug("Successfully unmarshaled Gitea event")
+
+ // Use the parser function to convert into Atlantis models
+ pull, pullEventType, baseRepo, headRepo, user, err := e.Parser.ParseGiteaPullRequestEvent(payload.PullRequest)
+ if err != nil {
+ e.Logger.Err("Failed to parse Gitea pull request event: %v", err)
+ e.respond(w, logging.Error, http.StatusInternalServerError, "Failed to process event")
+ return
+ }
+
+ e.Logger.Debug("Parsed Gitea event into Atlantis models successfully")
+
+ logger := e.Logger.With("gitea-request-id", reqID)
+	logger.Debug("Identified Gitea event as type %s", pullEventType)
+
+ // Call a generic handler for pull request events
+ response := e.handlePullRequestEvent(logger, baseRepo, headRepo, pull, user, pullEventType)
+
+ e.respond(w, logging.Debug, http.StatusOK, response.body)
+}
+
+// HandleGiteaPullRequestCommentEvent handles comment events from Gitea where Atlantis commands can come from.
+func (e *VCSEventsController) HandleGiteaPullRequestCommentEvent(w http.ResponseWriter, body []byte, reqID string) {
+ var event gitea.GiteaIssueCommentPayload
+ if err := json.Unmarshal(body, &event); err != nil {
+ e.Logger.Err("Failed to unmarshal Gitea comment payload: %v", err)
+ e.respond(w, logging.Error, http.StatusBadRequest, "Failed to parse request body")
+ return
+ }
+ e.Logger.Debug("Successfully unmarshaled Gitea comment event")
+
+	baseRepo, user, pullNum, err := e.Parser.ParseGiteaIssueCommentEvent(event)
+	if err != nil {
+		e.Logger.Err("Failed to parse Gitea issue comment event: %v", err)
+		e.respond(w, logging.Error, http.StatusBadRequest, "Failed to parse request body")
+		return
+	}
+ // Since we're lacking headRepo and maybePull details, we'll pass nil
+ // This follows the same approach as the GitHub client for handling comment events without full PR details
+ response := e.handleCommentEvent(e.Logger, baseRepo, nil, nil, user, pullNum, event.Comment.Body, event.Comment.ID, models.Gitea)
+
+ e.respond(w, logging.Debug, http.StatusOK, response.body)
+}
+
// HandleGithubCommentEvent handles comment events from GitHub where Atlantis
// commands can come from. It's exported to make testing easier.
func (e *VCSEventsController) HandleGithubCommentEvent(event *github.IssueCommentEvent, githubReqID string, logger logging.SimpleLogging) HTTPResponse {
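A quick way to exercise the new handler end-to-end is to POST a signed payload to `/events`. A minimal sketch, assuming Gitea's documented HMAC-SHA256 hex signature scheme (which `gitea.ValidateSignature` is expected to verify) and a server started with the webhook secret in `$SECRET`; the payload shape is illustrative, not the exact structs from `server/events/vcs/gitea`:

```bash
# Sign the body the way Gitea does: HMAC-SHA256 over the raw payload, hex-encoded.
BODY='{"action":"created","comment":{"id":1,"body":"atlantis help"}}'
SIG=$(printf '%s' "$BODY" | openssl dgst -sha256 -hmac "$SECRET" | awk '{print $NF}')

# Header names match the constants added above; the event-type header selects
# the pull_request_comment branch of the switch.
curl -sS -X POST "$URL/events" \
  -H 'Content-Type: application/json' \
  -H 'X-Gitea-Event: issue_comment' \
  -H 'X-Gitea-Event-Type: pull_request_comment' \
  -H 'X-Gitea-Delivery: smoke-test-1' \
  -H "X-Gitea-Signature: $SIG" \
  -d "$BODY"
```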
diff --git a/server/controllers/events/events_controller_e2e_test.go b/server/controllers/events/events_controller_e2e_test.go
index 781d483c34..2d38f23685 100644
--- a/server/controllers/events/events_controller_e2e_test.go
+++ b/server/controllers/events/events_controller_e2e_test.go
@@ -1333,7 +1333,6 @@ func setupE2E(t *testing.T, repoDir string, opt setupOption) (events_controllers
workingDir := &events.FileWorkspace{
DataDir: dataDir,
TestingOverrideHeadCloneURL: "override-me",
- Logger: logger,
}
var preWorkflowHooks []*valid.WorkflowHook
if !opt.disablePreWorkflowHooks {
@@ -1425,7 +1424,6 @@ func setupE2E(t *testing.T, repoDir string, opt setupOption) (events_controllers
false,
"auto",
statsScope,
- logger,
terraformClient,
)
diff --git a/server/controllers/events/events_controller_test.go b/server/controllers/events/events_controller_test.go
index 183772df8e..bc1a1c66a0 100644
--- a/server/controllers/events/events_controller_test.go
+++ b/server/controllers/events/events_controller_test.go
@@ -42,6 +42,7 @@ import (
)
const githubHeader = "X-Github-Event"
+const giteaHeader = "X-Gitea-Event"
const gitlabHeader = "X-Gitlab-Event"
const azuredevopsHeader = "Request-Id"
@@ -68,6 +69,17 @@ func TestPost_UnsupportedVCSGithub(t *testing.T) {
ResponseContains(t, w, http.StatusBadRequest, "Ignoring request since not configured to support GitHub")
}
+func TestPost_UnsupportedVCSGitea(t *testing.T) {
+ t.Log("when the request is for an unsupported vcs a 400 is returned")
+ e, _, _, _, _, _, _, _, _ := setup(t)
+ e.SupportedVCSHosts = nil
+ req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
+ req.Header.Set(giteaHeader, "value")
+ w := httptest.NewRecorder()
+ e.Post(w, req)
+ ResponseContains(t, w, http.StatusBadRequest, "Ignoring request since not configured to support Gitea")
+}
+
func TestPost_UnsupportedVCSGitlab(t *testing.T) {
t.Log("when the request is for an unsupported vcs a 400 is returned")
e, _, _, _, _, _, _, _, _ := setup(t)
@@ -90,6 +102,17 @@ func TestPost_InvalidGithubSecret(t *testing.T) {
ResponseContains(t, w, http.StatusBadRequest, "err")
}
+func TestPost_InvalidGiteaSecret(t *testing.T) {
+ t.Log("when the gitea payload can't be validated a 400 is returned")
+ e, v, _, _, _, _, _, _, _ := setup(t)
+ w := httptest.NewRecorder()
+ req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
+ req.Header.Set(giteaHeader, "value")
+ When(v.Validate(req, secret)).ThenReturn(nil, errors.New("err"))
+ e.Post(w, req)
+ ResponseContains(t, w, http.StatusBadRequest, "request did not pass validation")
+}
+
func TestPost_InvalidGitlabSecret(t *testing.T) {
t.Log("when the gitlab payload can't be validated a 400 is returned")
e, _, gl, _, _, _, _, _, _ := setup(t)
@@ -112,6 +135,18 @@ func TestPost_UnsupportedGithubEvent(t *testing.T) {
ResponseContains(t, w, http.StatusOK, "Ignoring unsupported event")
}
+func TestPost_UnsupportedGiteaEvent(t *testing.T) {
+ t.Log("when the event type is an unsupported gitea event we ignore it")
+ e, v, _, _, _, _, _, _, _ := setup(t)
+ w := httptest.NewRecorder()
+ req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
+ req.Header.Set(giteaHeader, "value")
+ e.GiteaWebhookSecret = nil
+ When(v.Validate(req, nil)).ThenReturn([]byte(`{"not an event": ""}`), nil)
+ e.Post(w, req)
+ ResponseContains(t, w, http.StatusOK, "Ignoring unsupported Gitea event")
+}
+
func TestPost_UnsupportedGitlabEvent(t *testing.T) {
t.Log("when the event type is an unsupported gitlab event we ignore it")
e, _, gl, _, _, _, _, _, _ := setup(t)
@@ -976,7 +1011,8 @@ func setup(t *testing.T) (events_controllers.VCSEventsController, *mocks.MockGit
CommandRunner: cr,
PullCleaner: c,
GithubWebhookSecret: secret,
- SupportedVCSHosts: []models.VCSHostType{models.Github, models.Gitlab, models.AzureDevops},
+ SupportedVCSHosts: []models.VCSHostType{models.Github, models.Gitlab, models.AzureDevops, models.Gitea},
+ GiteaWebhookSecret: secret,
GitlabWebhookSecret: secret,
GitlabRequestParserValidator: gl,
RepoAllowlistChecker: repoAllowlistChecker,
diff --git a/server/controllers/events/testdata/test-repos/automerge/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/automerge/exp-output-autoplan.txt
index c32ed6dfdc..8f32ac5efc 100644
--- a/server/controllers/events/testdata/test-repos/automerge/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/automerge/exp-output-autoplan.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
1. dir: `dir1` workspace: `default`
1. dir: `dir2` workspace: `default`
+---
### 1. dir: `dir1` workspace: `default`
```diff
@@ -20,10 +21,14 @@ Plan: 1 to add, 0 to change, 0 to destroy.
```
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d dir1`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d dir1
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir1`
+ ```shell
+ atlantis plan -d dir1
+ ```
---
### 2. dir: `dir2` workspace: `default`
@@ -43,17 +48,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
```
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d dir2`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d dir2
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir2`
+ ```shell
+ atlantis plan -d dir2
+ ```
---
### Plan Summary
2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-autoplan.txt
index 7f0f5f45a8..b6116bfde9 100644
--- a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-autoplan.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
1. dir: `dir1` workspace: `default`
1. dir: `dir2` workspace: `default`
+---
### 1. dir: `dir1` workspace: `default`
Show Output
@@ -26,13 +27,17 @@ Terraform will perform the following actions:
Plan: 1 to add, 0 to change, 0 to destroy.
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d dir1`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d dir1
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir1`
-
+ ```shell
+ atlantis plan -d dir1
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -59,13 +64,17 @@ Terraform will perform the following actions:
Plan: 1 to add, 0 to change, 0 to destroy.
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d dir2`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d dir2
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir2`
-
+ ```shell
+ atlantis plan -d dir2
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -73,7 +82,11 @@ Plan: 1 to add, 0 to change, 0 to destroy.
2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-import-dummy1.txt b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-import-dummy1.txt
index 04f87516ab..0e6c0c960c 100644
--- a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-import-dummy1.txt
+++ b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-import-dummy1.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir1`
\ No newline at end of file
+ ```shell
+ atlantis plan -d dir1
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-plan-again.txt
index c9a7d87124..9955b4f2c7 100644
--- a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-plan-again.txt
+++ b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-plan-again.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
1. dir: `dir1` workspace: `default`
1. dir: `dir2` workspace: `default`
+---
### 1. dir: `dir1` workspace: `default`
```diff
@@ -14,10 +15,14 @@ and found no differences, so no changes are needed.
```
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d dir1`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d dir1
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir1`
+ ```shell
+ atlantis plan -d dir1
+ ```
---
### 2. dir: `dir2` workspace: `default`
@@ -43,13 +48,17 @@ Terraform will perform the following actions:
Plan: 1 to add, 0 to change, 0 to destroy.
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d dir2`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d dir2
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir2`
-
+ ```shell
+ atlantis plan -d dir2
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -57,7 +66,11 @@ Plan: 1 to add, 0 to change, 0 to destroy.
2 projects, 1 with changes, 1 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-autoplan.txt
index 8fcbeaa757..4d597951a9 100644
--- a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-autoplan.txt
@@ -33,17 +33,25 @@ Terraform will perform the following actions:
Plan: 2 to add, 0 to change, 0 to destroy.
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 2 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-count.txt b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-count.txt
index d7957913db..32680f595f 100644
--- a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-count.txt
+++ b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-count.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
\ No newline at end of file
+ ```shell
+ atlantis plan -d .
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-foreach.txt b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-foreach.txt
index 45b02dd35f..1e1caabfca 100644
--- a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-foreach.txt
+++ b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-foreach.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
\ No newline at end of file
+ ```shell
+ atlantis plan -d .
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-plan-again.txt
index 46a378158b..379d9e8ce7 100644
--- a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-plan-again.txt
+++ b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-plan-again.txt
@@ -8,13 +8,21 @@ and found no differences, so no changes are needed.
```
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d . -- -var var=overridden`
+ ```shell
+ atlantis plan -d . -- -var var=overridden
+ ```
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-autoplan.txt
index 5662b98336..45007d2b8f 100644
--- a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-autoplan.txt
@@ -33,17 +33,25 @@ Terraform will perform the following actions:
Plan: 2 to add, 0 to change, 0 to destroy.
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 2 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy1.txt b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy1.txt
index 1823a29537..f4a6cb37d9 100644
--- a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy1.txt
+++ b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy1.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
\ No newline at end of file
+ ```shell
+ atlantis plan -d .
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy2.txt b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy2.txt
index d515857ff1..9ab2dbb7e3 100644
--- a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy2.txt
+++ b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy2.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
\ No newline at end of file
+ ```shell
+ atlantis plan -d .
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-plan-again.txt
index adc09b4a37..a7268e38f8 100644
--- a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-plan-again.txt
+++ b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-plan-again.txt
@@ -8,13 +8,21 @@ and found no differences, so no changes are needed.
```
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
+ ```shell
+ atlantis plan -d .
+ ```
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy1.txt b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy1.txt
index 99e0e3434f..38f283b20e 100644
--- a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy1.txt
+++ b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy1.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -p dir1-ops`
+ ```shell
+ atlantis plan -p dir1-ops
+ ```
diff --git a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy2.txt b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy2.txt
index 3f168d91b3..cd4659c0b7 100644
--- a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy2.txt
+++ b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy2.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -p dir1-ops`
+ ```shell
+ atlantis plan -p dir1-ops
+ ```
diff --git a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-plan.txt b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-plan.txt
index 9859fcbc1f..7edca86268 100644
--- a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-plan.txt
+++ b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-plan.txt
@@ -8,13 +8,21 @@ and found no differences, so no changes are needed.
```
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -p dir1-ops`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -p dir1-ops
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -p dir1-ops`
+ ```shell
+ atlantis plan -p dir1-ops
+ ```
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
diff --git a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-autoplan.txt
index 1e55d623b5..43a1815d11 100644
--- a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-autoplan.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
1. dir: `staging` workspace: `default`
1. dir: `production` workspace: `default`
+---
### 1. dir: `staging` workspace: `default`
Show Output
@@ -23,13 +24,17 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ var = "staging"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d staging`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d staging
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d staging`
-
+ ```shell
+ atlantis plan -d staging
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -53,13 +58,17 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ var = "production"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d production`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d production
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d production`
-
+ ```shell
+ atlantis plan -d production
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -67,7 +76,11 @@ Plan: 1 to add, 0 to change, 0 to destroy.
2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-production.txt b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-production.txt
index f08e2c50ae..298d515d93 100644
--- a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-production.txt
+++ b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-production.txt
@@ -20,8 +20,12 @@ Plan: 1 to add, 0 to change, 0 to destroy.
```
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d production`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d production
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d production`
+ ```shell
+ atlantis plan -d production
+ ```
diff --git a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-staging.txt b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-staging.txt
index de773736db..9f8399b7f1 100644
--- a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-staging.txt
+++ b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-staging.txt
@@ -20,8 +20,12 @@ Plan: 1 to add, 0 to change, 0 to destroy.
```
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d staging`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d staging
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d staging`
+ ```shell
+ atlantis plan -d staging
+ ```
diff --git a/server/controllers/events/testdata/test-repos/modules/exp-output-autoplan-only-staging.txt b/server/controllers/events/testdata/test-repos/modules/exp-output-autoplan-only-staging.txt
index d1faf53fc8..c3bdadc019 100644
--- a/server/controllers/events/testdata/test-repos/modules/exp-output-autoplan-only-staging.txt
+++ b/server/controllers/events/testdata/test-repos/modules/exp-output-autoplan-only-staging.txt
@@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ var = "staging"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d staging`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d staging
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d staging`
-
+ ```shell
+ atlantis plan -d staging
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/modules/exp-output-plan-production.txt b/server/controllers/events/testdata/test-repos/modules/exp-output-plan-production.txt
index 19246ade07..13d2414f3f 100644
--- a/server/controllers/events/testdata/test-repos/modules/exp-output-plan-production.txt
+++ b/server/controllers/events/testdata/test-repos/modules/exp-output-plan-production.txt
@@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ var = "production"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d production`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d production
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d production`
-
+ ```shell
+ atlantis plan -d production
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/modules/exp-output-plan-staging.txt b/server/controllers/events/testdata/test-repos/modules/exp-output-plan-staging.txt
index d1faf53fc8..c3bdadc019 100644
--- a/server/controllers/events/testdata/test-repos/modules/exp-output-plan-staging.txt
+++ b/server/controllers/events/testdata/test-repos/modules/exp-output-plan-staging.txt
@@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ var = "staging"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d staging`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d staging
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d staging`
-
+ ```shell
+ atlantis plan -d staging
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt
index 0ace841faf..3d94c6521c 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt
@@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit
policy set: test_policy: requires: 1 approval(s), have: 0.
```
* :heavy_check_mark: To **approve** this project, comment:
- * `atlantis approve_policies -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis approve_policies -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * `atlantis approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis approve_policies
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan`
\ No newline at end of file
+ ```shell
+ atlantis plan
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-autoplan.txt
index 6f7ce87643..d3f41336a8 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-autoplan.txt
@@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-approve-policies-clear.txt b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-approve-policies-clear.txt
index e6643f8ce3..107a689278 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-approve-policies-clear.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-approve-policies-clear.txt
@@ -1,6 +1,7 @@
Ran Approve Policies for 1 projects:
1. dir: `.` workspace: `default`
+---
### 1. dir: `.` workspace: `default`
**Approve Policies Failed**: One or more policy sets require additional approval.
@@ -9,15 +10,25 @@ Ran Approve Policies for 1 projects:
policy set: test_policy: requires: 1 approval(s), have: 0.
```
* :heavy_check_mark: To **approve** this project, comment:
- * `atlantis approve_policies -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis approve_policies -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan -d .`
+ ```shell
+ atlantis plan -d .
+ ```
---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * `atlantis approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis approve_policies
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan`
\ No newline at end of file
+ ```shell
+ atlantis plan
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-auto-policy-check.txt
index 0ace841faf..3d94c6521c 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-auto-policy-check.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-auto-policy-check.txt
@@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit
policy set: test_policy: requires: 1 approval(s), have: 0.
```
* :heavy_check_mark: To **approve** this project, comment:
- * `atlantis approve_policies -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis approve_policies -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * `atlantis approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis approve_policies
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan`
\ No newline at end of file
+ ```shell
+ atlantis plan
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-autoplan.txt
index 6f7ce87643..d3f41336a8 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-autoplan.txt
@@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-auto-policy-check.txt
index f366769233..c8b5da50dd 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-auto-policy-check.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-auto-policy-check.txt
@@ -25,15 +25,24 @@ post-conftest output
policy set: test_policy: requires: 1 approval(s), have: 0.
```
* :heavy_check_mark: To **approve** this project, comment:
- * `atlantis approve_policies -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis approve_policies -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * `atlantis approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis approve_policies
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan`
\ No newline at end of file
+ ```shell
+ atlantis plan
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-autoplan.txt
index 6f7ce87643..d3f41336a8 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-autoplan.txt
@@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-approve-policies.txt b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-approve-policies.txt
index d6e39f260d..b842f99682 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-approve-policies.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-approve-policies.txt
@@ -1,6 +1,7 @@
Ran Approve Policies for 1 projects:
1. dir: `.` workspace: `default`
+---
### 1. dir: `.` workspace: `default`
**Approve Policies Error**
@@ -15,15 +16,25 @@ Ran Approve Policies for 1 projects:
policy set: test_policy: requires: 1 approval(s), have: 0.
```
* :heavy_check_mark: To **approve** this project, comment:
- * `atlantis approve_policies -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis approve_policies -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan -d .`
+ ```shell
+ atlantis plan -d .
+ ```
---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * `atlantis approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis approve_policies
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan`
\ No newline at end of file
+ ```shell
+ atlantis plan
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt
index 0ace841faf..3d94c6521c 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt
@@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit
policy set: test_policy: requires: 1 approval(s), have: 0.
```
* :heavy_check_mark: To **approve** this project, comment:
- * `atlantis approve_policies -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis approve_policies -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * `atlantis approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis approve_policies
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan`
\ No newline at end of file
+ ```shell
+ atlantis plan
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-autoplan.txt
index 6f7ce87643..d3f41336a8 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-autoplan.txt
@@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-auto-policy-check.txt
index 0ace841faf..3d94c6521c 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-auto-policy-check.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-auto-policy-check.txt
@@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit
policy set: test_policy: requires: 1 approval(s), have: 0.
```
* :heavy_check_mark: To **approve** this project, comment:
- * `atlantis approve_policies -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis approve_policies -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * `atlantis approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis approve_policies
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan`
\ No newline at end of file
+ ```shell
+ atlantis plan
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-autoplan.txt
index 6f7ce87643..d3f41336a8 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-autoplan.txt
@@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-auto-policy-check.txt
index 0ace841faf..3d94c6521c 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-auto-policy-check.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-auto-policy-check.txt
@@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit
policy set: test_policy: requires: 1 approval(s), have: 0.
```
* :heavy_check_mark: To **approve** this project, comment:
- * `atlantis approve_policies -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis approve_policies -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * `atlantis approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis approve_policies
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan`
\ No newline at end of file
+ ```shell
+ atlantis plan
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-autoplan.txt
index 6f7ce87643..d3f41336a8 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-autoplan.txt
@@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-auto-policy-check.txt
index 0ace841faf..3d94c6521c 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-auto-policy-check.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-auto-policy-check.txt
@@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit
policy set: test_policy: requires: 1 approval(s), have: 0.
```
* :heavy_check_mark: To **approve** this project, comment:
- * `atlantis approve_policies -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis approve_policies -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * `atlantis approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis approve_policies
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan`
\ No newline at end of file
+ ```shell
+ atlantis plan
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-autoplan.txt
index 6f7ce87643..d3f41336a8 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-autoplan.txt
@@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-auto-policy-check.txt
index 0ace841faf..3d94c6521c 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-auto-policy-check.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-auto-policy-check.txt
@@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit
policy set: test_policy: requires: 1 approval(s), have: 0.
```
* :heavy_check_mark: To **approve** this project, comment:
- * `atlantis approve_policies -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis approve_policies -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * `atlantis approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis approve_policies
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan`
\ No newline at end of file
+ ```shell
+ atlantis plan
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-autoplan.txt
index 6f7ce87643..d3f41336a8 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-autoplan.txt
@@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-auto-policy-check.txt
index 0ace841faf..3d94c6521c 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-auto-policy-check.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-auto-policy-check.txt
@@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit
policy set: test_policy: requires: 1 approval(s), have: 0.
```
* :heavy_check_mark: To **approve** this project, comment:
- * `atlantis approve_policies -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis approve_policies -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * `atlantis approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis approve_policies
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan`
\ No newline at end of file
+ ```shell
+ atlantis plan
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-autoplan.txt
index 6f7ce87643..d3f41336a8 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-autoplan.txt
@@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-auto-policy-check.txt
index c7f45c85f5..669b9cb064 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-auto-policy-check.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-auto-policy-check.txt
@@ -15,15 +15,24 @@ FAIL - - null_resource_policy - WARNING: Null Resource crea
policy set: test_policy: requires: 1 approval(s), have: 0.
```
* :heavy_check_mark: To **approve** this project, comment:
- * `atlantis approve_policies -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis approve_policies -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * `atlantis approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis approve_policies
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan`
\ No newline at end of file
+ ```shell
+ atlantis plan
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-autoplan.txt
index 6f7ce87643..d3f41336a8 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-autoplan.txt
@@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-apply.txt b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-apply.txt
index eb6bda8987..7e0bd72a67 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-apply.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-apply.txt
@@ -2,6 +2,7 @@ Ran Apply for 2 projects:
1. dir: `dir1` workspace: `default`
1. dir: `dir2` workspace: `default`
+---
### 1. dir: `dir1` workspace: `default`
```diff
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt
index c292c651f3..944cd1ba56 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt
@@ -2,6 +2,7 @@ Ran Policy Check for 2 projects:
1. dir: `dir1` workspace: `default`
1. dir: `dir2` workspace: `default`
+---
### 1. dir: `dir1` workspace: `default`
#### Policy Set: `test_policy`
@@ -13,10 +14,14 @@ Ran Policy Check for 2 projects:
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d dir1`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d dir1
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan -d dir1`
+ ```shell
+ atlantis plan -d dir1
+ ```
---
### 2. dir: `dir2` workspace: `default`
@@ -35,15 +40,25 @@ FAIL - - main - WARNING: Forbidden Resource creation is pro
policy set: test_policy: requires: 1 approval(s), have: 0.
```
* :heavy_check_mark: To **approve** this project, comment:
- * `atlantis approve_policies -d dir2`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis approve_policies -d dir2
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan -d dir2`
+ ```shell
+ atlantis plan -d dir2
+ ```
---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * `atlantis approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis approve_policies
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan`
\ No newline at end of file
+ ```shell
+ atlantis plan
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-autoplan.txt
index 098c4eba93..e01442f671 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-autoplan.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
1. dir: `dir1` workspace: `default`
1. dir: `dir2` workspace: `default`
+---
### 1. dir: `dir1` workspace: `default`
Show Output
@@ -23,13 +24,17 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d dir1`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d dir1
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir1`
-
+ ```shell
+ atlantis plan -d dir1
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -53,13 +58,17 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d dir2`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d dir2
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir2`
-
+ ```shell
+ atlantis plan -d dir2
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -67,7 +76,11 @@ Plan: 1 to add, 0 to change, 0 to destroy.
2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
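A second recurring tweak: multi-project comments now insert a `---` rule between the numbered project index and the first per-project section (the `+---` lines in the hunks above). A rough sketch of that assembly step, with hypothetical pre-rendered fragments standing in for the real template data:

```go
package main

import (
	"fmt"
	"strings"
)

// joinProjects sketches the new layout: the project index is followed by
// a horizontal rule, and the per-project sections stay separated by rules
// as before. The fragments are placeholders, not real Atlantis output.
func joinProjects(index string, sections []string) string {
	parts := append([]string{index}, sections...)
	return strings.Join(parts, "\n---\n")
}

func main() {
	index := "Ran Plan for 2 projects:\n1. dir: `dir1` workspace: `default`\n1. dir: `dir2` workspace: `default`"
	sections := []string{
		"### 1. dir: `dir1` workspace: `default`\n...",
		"### 2. dir: `dir2` workspace: `default`\n...",
	}
	fmt.Println(joinProjects(index, sections))
}
```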
diff --git a/server/controllers/events/testdata/test-repos/policy-checks-success-silent/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-success-silent/exp-output-autoplan.txt
index ea7d4bf3ec..0fe7b1646b 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks-success-silent/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks-success-silent/exp-output-autoplan.txt
@@ -9,13 +9,21 @@ state, without changing any real infrastructure.
```
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
+ ```shell
+ atlantis plan -d .
+ ```
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks/exp-output-auto-policy-check.txt
index 0ace841faf..3d94c6521c 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks/exp-output-auto-policy-check.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks/exp-output-auto-policy-check.txt
@@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit
policy set: test_policy: requires: 1 approval(s), have: 0.
```
* :heavy_check_mark: To **approve** this project, comment:
- * `atlantis approve_policies -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis approve_policies -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * `atlantis approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis approve_policies
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan`
\ No newline at end of file
+ ```shell
+ atlantis plan
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks/exp-output-autoplan.txt
index 6f7ce87643..d3f41336a8 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks/exp-output-autoplan.txt
@@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-apply.txt b/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-apply.txt
index a136ff9691..4b78a636d5 100644
--- a/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-apply.txt
+++ b/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-apply.txt
@@ -2,6 +2,7 @@ Ran Apply for 2 projects:
1. dir: `infrastructure/production` workspace: `default`
1. dir: `infrastructure/staging` workspace: `default`
+---
### 1. dir: `infrastructure/production` workspace: `default`
```diff
diff --git a/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-autoplan.txt
index 29f5f76dae..8bf40fc657 100644
--- a/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-autoplan.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
1. dir: `infrastructure/staging` workspace: `default`
1. dir: `infrastructure/production` workspace: `default`
+---
### 1. dir: `infrastructure/staging` workspace: `default`
```diff
@@ -20,10 +21,14 @@ Plan: 1 to add, 0 to change, 0 to destroy.
```
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d infrastructure/staging`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d infrastructure/staging
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d infrastructure/staging`
+ ```shell
+ atlantis plan -d infrastructure/staging
+ ```
---
### 2. dir: `infrastructure/production` workspace: `default`
@@ -43,17 +48,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
```
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d infrastructure/production`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d infrastructure/production
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d infrastructure/production`
+ ```shell
+ atlantis plan -d infrastructure/production
+ ```
---
### Plan Summary
2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
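These `exp-output-*.txt` files are golden files: the end-to-end tests render the PR comment, normalize volatile parts (note the literal `lock-url` placeholder standing in for the real lock link), and compare the result against the fixture. A hedged sketch of that comparison step; the helper name and the trimming behavior are assumptions, not the actual test code:

```go
package events_test

import (
	"os"
	"strings"
	"testing"
)

// assertMatchesGolden compares a rendered comment against an exp-output
// fixture. Illustrative only: the real Atlantis tests substitute
// placeholders (e.g. lock URLs -> "lock-url") before this point.
func assertMatchesGolden(t *testing.T, goldenPath, actual string) {
	t.Helper()
	want, err := os.ReadFile(goldenPath)
	if err != nil {
		t.Fatalf("reading golden file %s: %v", goldenPath, err)
	}
	// Many fixtures in this tree end without a trailing newline (the
	// "\ No newline at end of file" markers), so trim before comparing.
	if strings.TrimRight(string(want), "\n") != strings.TrimRight(actual, "\n") {
		t.Errorf("rendered comment does not match %s", goldenPath)
	}
}
```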
diff --git a/server/controllers/events/testdata/test-repos/server-side-cfg/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/server-side-cfg/exp-output-autoplan.txt
index ad9591b8ae..37e78c18af 100644
--- a/server/controllers/events/testdata/test-repos/server-side-cfg/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/server-side-cfg/exp-output-autoplan.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
1. dir: `.` workspace: `default`
1. dir: `.` workspace: `staging`
+---
### 1. dir: `.` workspace: `default`
Show Output
@@ -28,13 +29,17 @@ Changes to Outputs:
postplan custom
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -61,13 +66,17 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "staging"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -w staging`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -w staging
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -w staging`
-
+ ```shell
+ atlantis plan -w staging
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -75,7 +84,11 @@ Plan: 1 to add, 0 to change, 0 to destroy.
2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-autoplan.txt
index 6e70ac89fb..822531032c 100644
--- a/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-autoplan.txt
@@ -30,17 +30,25 @@ Changes to Outputs:
+ var = "default"
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 3 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-plan.txt b/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-plan.txt
index 6e70ac89fb..822531032c 100644
--- a/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-plan.txt
+++ b/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-plan.txt
@@ -30,17 +30,25 @@ Changes to Outputs:
+ var = "default"
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 3 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-apply-all.txt b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-apply-all.txt
index 61eac2271a..4e757a396c 100644
--- a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-apply-all.txt
+++ b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-apply-all.txt
@@ -2,6 +2,7 @@ Ran Apply for 2 projects:
1. dir: `.` workspace: `default`
1. dir: `.` workspace: `staging`
+---
### 1. dir: `.` workspace: `default`
```diff
diff --git a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-autoplan.txt
index dcbb45bf78..c445925f6c 100644
--- a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-autoplan.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
1. dir: `.` workspace: `default`
1. dir: `.` workspace: `staging`
+---
### 1. dir: `.` workspace: `default`
Show Output
@@ -29,13 +30,17 @@ Changes to Outputs:
postplan
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -60,13 +65,17 @@ Changes to Outputs:
+ var = "fromfile"
+ workspace = "staging"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -w staging`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -w staging
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -w staging`
-
+ ```shell
+ atlantis plan -w staging
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -74,7 +83,11 @@ Plan: 1 to add, 0 to change, 0 to destroy.
2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-default.txt b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-default.txt
index f0419c9189..b944f4deab 100644
--- a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-default.txt
+++ b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-default.txt
@@ -25,17 +25,25 @@ Changes to Outputs:
postplan
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
diff --git a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-staging.txt b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-staging.txt
index 7e34016bab..64880424f6 100644
--- a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-staging.txt
+++ b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-staging.txt
@@ -20,17 +20,25 @@ Changes to Outputs:
+ var = "fromfile"
+ workspace = "staging"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -w staging`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -w staging
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -w staging`
-
+ ```shell
+ atlantis plan -w staging
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-apply-var-all.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-apply-var-all.txt
index 2977099b55..cb7dd9a752 100644
--- a/server/controllers/events/testdata/test-repos/simple/exp-output-apply-var-all.txt
+++ b/server/controllers/events/testdata/test-repos/simple/exp-output-apply-var-all.txt
@@ -2,6 +2,7 @@ Ran Apply for 2 projects:
1. dir: `.` workspace: `default`
1. dir: `.` workspace: `new_workspace`
+---
### 1. dir: `.` workspace: `default`
Show Output
diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt
index 242515e415..13bdae3fac 100644
--- a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt
+++ b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt
@@ -30,17 +30,25 @@ Changes to Outputs:
+ var = "new_workspace"
+ workspace = "new_workspace"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -w new_workspace`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -w new_workspace
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -w new_workspace -- -var var=new_workspace`
-
+ ```shell
+ atlantis plan -w new_workspace -- -var var=new_workspace
+ ```
Plan: 3 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-var-overridden.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-var-overridden.txt
index 5a86cff0c9..ab28d0ca84 100644
--- a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-var-overridden.txt
+++ b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-var-overridden.txt
@@ -30,17 +30,25 @@ Changes to Outputs:
+ var = "overridden"
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d . -- -var var=overridden`
-
+ ```shell
+ atlantis plan -d . -- -var var=overridden
+ ```
Plan: 3 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan.txt
index 54e5505476..191b540b63 100644
--- a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan.txt
+++ b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan.txt
@@ -30,17 +30,25 @@ Changes to Outputs:
+ var = "default_workspace"
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d . -- -var var=default_workspace`
-
+ ```shell
+ atlantis plan -d . -- -var var=default_workspace
+ ```
Plan: 3 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-auto-policy-check.txt
index c6e0823922..fd03e48aed 100644
--- a/server/controllers/events/testdata/test-repos/simple/exp-output-auto-policy-check.txt
+++ b/server/controllers/events/testdata/test-repos/simple/exp-output-auto-policy-check.txt
@@ -5,13 +5,21 @@ Ran Policy Check for dir: `.` workspace: `default`
```
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * `atlantis plan -d .`
+ ```shell
+ atlantis plan -d .
+ ```
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-autoplan.txt
index 6e70ac89fb..822531032c 100644
--- a/server/controllers/events/testdata/test-repos/simple/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/simple/exp-output-autoplan.txt
@@ -30,17 +30,25 @@ Changes to Outputs:
+ var = "default"
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 3 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-autoplan.txt
index 49c4dc2673..fe62683dad 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-autoplan.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
1. dir: `dir1` workspace: `default`
1. dir: `dir2` workspace: `default`
+---
### 1. dir: `dir1` workspace: `default`
Show Output
@@ -26,13 +27,17 @@ Terraform will perform the following actions:
Plan: 1 to add, 0 to change, 0 to destroy.
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d dir1`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d dir1
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir1`
-
+ ```shell
+ atlantis plan -d dir1
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -59,13 +64,17 @@ Terraform will perform the following actions:
Plan: 1 to add, 0 to change, 0 to destroy.
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d dir2`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d dir2
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir2`
-
+ ```shell
+ atlantis plan -d dir2
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -73,7 +82,11 @@ Plan: 1 to add, 0 to change, 0 to destroy.
2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy1.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy1.txt
index 45b6c1ed55..8d98fee1d7 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy1.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy1.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir1`
\ No newline at end of file
+ ```shell
+ atlantis plan -d dir1
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy2.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy2.txt
index 7a28ec5e85..e6bef5251a 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy2.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy2.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir2`
\ No newline at end of file
+ ```shell
+ atlantis plan -d dir2
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan-again.txt
index 49c4dc2673..fe62683dad 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan-again.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan-again.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
1. dir: `dir1` workspace: `default`
1. dir: `dir2` workspace: `default`
+---
### 1. dir: `dir1` workspace: `default`
Show Output
@@ -26,13 +27,17 @@ Terraform will perform the following actions:
Plan: 1 to add, 0 to change, 0 to destroy.
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d dir1`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d dir1
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir1`
-
+ ```shell
+ atlantis plan -d dir1
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -59,13 +64,17 @@ Terraform will perform the following actions:
Plan: 1 to add, 0 to change, 0 to destroy.
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d dir2`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d dir2
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir2`
-
+ ```shell
+ atlantis plan -d dir2
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -73,7 +82,11 @@ Plan: 1 to add, 0 to change, 0 to destroy.
2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan.txt
index fb3cfdbbd7..d74495004a 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
1. dir: `dir1` workspace: `default`
1. dir: `dir2` workspace: `default`
+---
### 1. dir: `dir1` workspace: `default`
```diff
@@ -14,10 +15,14 @@ and found no differences, so no changes are needed.
```
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d dir1`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d dir1
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir1`
+ ```shell
+ atlantis plan -d dir1
+ ```
---
### 2. dir: `dir2` workspace: `default`
@@ -31,17 +36,25 @@ and found no differences, so no changes are needed.
```
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d dir2`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d dir2
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir2`
+ ```shell
+ atlantis plan -d dir2
+ ```
---
### Plan Summary
2 projects, 0 with changes, 2 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-state-rm-multiple-projects.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-state-rm-multiple-projects.txt
index 3c8e0eb0bb..973455d73c 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-state-rm-multiple-projects.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-state-rm-multiple-projects.txt
@@ -2,6 +2,7 @@ Ran State for 2 projects:
1. dir: `dir1` workspace: `default`
1. dir: `dir2` workspace: `default`
+---
### 1. dir: `dir1` workspace: `default`
```diff
@@ -12,7 +13,9 @@ Successfully removed 1 resource instance(s).
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir1`
+ ```shell
+ atlantis plan -d dir1
+ ```
---
### 2. dir: `dir2` workspace: `default`
@@ -24,6 +27,8 @@ Successfully removed 1 resource instance(s).
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d dir2`
+ ```shell
+ atlantis plan -d dir2
+ ```
---
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-autoplan.txt
index 077f989d9a..530e9df755 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-autoplan.txt
@@ -44,17 +44,25 @@ Terraform will perform the following actions:
Plan: 3 to add, 0 to change, 0 to destroy.
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
-
+ ```shell
+ atlantis plan -d .
+ ```
Plan: 3 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-count.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-count.txt
index d7957913db..32680f595f 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-count.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-count.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
\ No newline at end of file
+ ```shell
+ atlantis plan -d .
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-foreach.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-foreach.txt
index 284c8e2457..982e937496 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-foreach.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-foreach.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
\ No newline at end of file
+ ```shell
+ atlantis plan -d .
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-simple.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-simple.txt
index 1f17baa2d7..be74444839 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-simple.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-simple.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
\ No newline at end of file
+ ```shell
+ atlantis plan -d .
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan-again.txt
index edb4c17579..548bf843a2 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan-again.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan-again.txt
@@ -44,17 +44,25 @@ Terraform will perform the following actions:
Plan: 3 to add, 0 to change, 0 to destroy.
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d . -- -var var=overridden`
-
+ ```shell
+ atlantis plan -d . -- -var var=overridden
+ ```
Plan: 3 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan.txt
index 46a378158b..379d9e8ce7 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan.txt
@@ -8,13 +8,21 @@ and found no differences, so no changes are needed.
```
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d .
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d . -- -var var=overridden`
+ ```shell
+ atlantis plan -d . -- -var var=overridden
+ ```
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-foreach.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-foreach.txt
index 264b5f2881..a6f0f97cce 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-foreach.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-foreach.txt
@@ -8,4 +8,6 @@ Successfully removed 1 resource instance(s).
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
\ No newline at end of file
+ ```shell
+ atlantis plan -d .
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-multiple.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-multiple.txt
index a0d1b54717..0848fc65e8 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-multiple.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-multiple.txt
@@ -9,4 +9,6 @@ Successfully removed 2 resource instance(s).
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d .`
\ No newline at end of file
+ ```shell
+ atlantis plan -d .
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-import-dummy1.txt b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-import-dummy1.txt
index a6a1dbbfaa..b81ff32704 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-import-dummy1.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-import-dummy1.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -p dir1-ops`
\ No newline at end of file
+ ```shell
+ atlantis plan -p dir1-ops
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan-again.txt
index b24ee90b20..e1ea612f2f 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan-again.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan-again.txt
@@ -22,17 +22,25 @@ Terraform will perform the following actions:
Plan: 1 to add, 0 to change, 0 to destroy.
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -p dir1-ops`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -p dir1-ops
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -p dir1-ops`
-
+ ```shell
+ atlantis plan -p dir1-ops
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan.txt b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan.txt
index 4c73caa512..3beeb14cab 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan.txt
@@ -10,13 +10,21 @@ and found no differences, so no changes are needed.
```
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -p dir1-ops`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -p dir1-ops
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -p dir1-ops`
+ ```shell
+ atlantis plan -p dir1-ops
+ ```
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-state-rm-dummy1.txt b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-state-rm-dummy1.txt
index 5aa99db217..8c63577a49 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-state-rm-dummy1.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-state-rm-dummy1.txt
@@ -8,4 +8,6 @@ Successfully removed 1 resource instance(s).
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -p dir1-ops`
\ No newline at end of file
+ ```shell
+ atlantis plan -p dir1-ops
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt b/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt
index 20be38a244..cf3378bc59 100644
--- a/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt
+++ b/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt
@@ -20,17 +20,25 @@ Changes to Outputs:
+ var = "default"
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -p default`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -p default
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -p default`
-
+ ```shell
+ atlantis plan -p default
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt b/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt
index e34c9bc2dd..efad85de0e 100644
--- a/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt
+++ b/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt
@@ -20,17 +20,25 @@ Changes to Outputs:
+ var = "staging"
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -p staging`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -p staging
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -p staging`
-
+ ```shell
+ atlantis plan -p staging
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/tfvars-yaml/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/tfvars-yaml/exp-output-autoplan.txt
index 82ce193d9f..75c4320f96 100644
--- a/server/controllers/events/testdata/test-repos/tfvars-yaml/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/tfvars-yaml/exp-output-autoplan.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
1. project: `default` dir: `.` workspace: `default`
1. project: `staging` dir: `.` workspace: `default`
+---
### 1. project: `default` dir: `.` workspace: `default`
Show Output
@@ -26,13 +27,17 @@ Changes to Outputs:
workspace=default
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -p default`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -p default
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -p default`
-
+ ```shell
+ atlantis plan -p default
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -57,13 +62,17 @@ Changes to Outputs:
+ var = "staging"
+ workspace = "default"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -p staging`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -p staging
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -p staging`
-
+ ```shell
+ atlantis plan -p staging
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -71,7 +80,11 @@ Plan: 1 to add, 0 to change, 0 to destroy.
2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-production.txt b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-production.txt
index cd4e8e0b95..986241f599 100644
--- a/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-production.txt
+++ b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-production.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
1. dir: `production` workspace: `production`
1. dir: `staging` workspace: `staging`
+---
### 1. dir: `production` workspace: `production`
Show Output
@@ -23,13 +24,17 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "production"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d production -w production`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d production -w production
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d production -w production`
-
+ ```shell
+ atlantis plan -d production -w production
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -53,13 +58,17 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "staging"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d staging -w staging`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d staging -w staging
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d staging -w staging`
-
+ ```shell
+ atlantis plan -d staging -w staging
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -67,7 +76,11 @@ Plan: 1 to add, 0 to change, 0 to destroy.
2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-staging.txt b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-staging.txt
index cd4e8e0b95..986241f599 100644
--- a/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-staging.txt
+++ b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-staging.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
1. dir: `production` workspace: `production`
1. dir: `staging` workspace: `staging`
+---
### 1. dir: `production` workspace: `production`
Show Output
@@ -23,13 +24,17 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "production"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d production -w production`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d production -w production
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d production -w production`
-
+ ```shell
+ atlantis plan -d production -w production
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -53,13 +58,17 @@ Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ workspace = "staging"
```
+
* :arrow_forward: To **apply** this plan, comment:
- * `atlantis apply -d staging -w staging`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ ```shell
+ atlantis apply -d staging -w staging
+ ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * `atlantis plan -d staging -w staging`
-
+ ```shell
+ atlantis plan -d staging -w staging
+ ```
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -67,7 +76,11 @@ Plan: 1 to add, 0 to change, 0 to destroy.
2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ ```shell
+ atlantis apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ ```shell
+ atlantis unlock
+ ```
\ No newline at end of file
diff --git a/server/controllers/github_app_controller.go b/server/controllers/github_app_controller.go
index 5ac08d00fa..823c82928e 100644
--- a/server/controllers/github_app_controller.go
+++ b/server/controllers/github_app_controller.go
@@ -6,7 +6,7 @@ import (
"net/http"
"net/url"
- "github.com/runatlantis/atlantis/server/controllers/templates"
+ "github.com/runatlantis/atlantis/server/controllers/web_templates"
"github.com/runatlantis/atlantis/server/events/vcs"
"github.com/runatlantis/atlantis/server/logging"
)
@@ -70,7 +70,7 @@ func (g *GithubAppController) ExchangeCode(w http.ResponseWriter, r *http.Reques
g.Logger.Debug("Found credentials for GitHub app %q with id %d", app.Name, app.ID)
- err = templates.GithubAppSetupTemplate.Execute(w, templates.GithubSetupData{
+ err = web_templates.GithubAppSetupTemplate.Execute(w, web_templates.GithubSetupData{
Target: "",
Manifest: "",
ID: app.ID,
@@ -142,7 +142,7 @@ func (g *GithubAppController) New(w http.ResponseWriter, _ *http.Request) {
return
}
- err = templates.GithubAppSetupTemplate.Execute(w, templates.GithubSetupData{
+ err = web_templates.GithubAppSetupTemplate.Execute(w, web_templates.GithubSetupData{
Target: url.String(),
Manifest: string(jsonManifest),
})
diff --git a/server/controllers/jobs_controller.go b/server/controllers/jobs_controller.go
index 0363977944..bb38a05e44 100644
--- a/server/controllers/jobs_controller.go
+++ b/server/controllers/jobs_controller.go
@@ -6,7 +6,7 @@ import (
"net/url"
"github.com/gorilla/mux"
- "github.com/runatlantis/atlantis/server/controllers/templates"
+ "github.com/runatlantis/atlantis/server/controllers/web_templates"
"github.com/runatlantis/atlantis/server/controllers/websocket"
"github.com/runatlantis/atlantis/server/core/locking"
"github.com/runatlantis/atlantis/server/logging"
@@ -29,8 +29,8 @@ type JobsController struct {
AtlantisVersion string
AtlantisURL *url.URL
Logger logging.SimpleLogging
- ProjectJobsTemplate templates.TemplateWriter
- ProjectJobsErrorTemplate templates.TemplateWriter
+ ProjectJobsTemplate web_templates.TemplateWriter
+ ProjectJobsErrorTemplate web_templates.TemplateWriter
Backend locking.Backend
WsMux *websocket.Multiplexor
KeyGenerator JobIDKeyGenerator
@@ -45,7 +45,7 @@ func (j *JobsController) getProjectJobs(w http.ResponseWriter, r *http.Request)
return err
}
- viewData := templates.ProjectJobData{
+ viewData := web_templates.ProjectJobData{
AtlantisVersion: j.AtlantisVersion,
ProjectPath: jobID,
CleanedBasePath: j.AtlantisURL.Path,
diff --git a/server/controllers/locks_controller.go b/server/controllers/locks_controller.go
index bab7fad27a..27b330c8b3 100644
--- a/server/controllers/locks_controller.go
+++ b/server/controllers/locks_controller.go
@@ -5,7 +5,7 @@ import (
"net/http"
"net/url"
- "github.com/runatlantis/atlantis/server/controllers/templates"
+ "github.com/runatlantis/atlantis/server/controllers/web_templates"
"github.com/gorilla/mux"
"github.com/runatlantis/atlantis/server/core/locking"
@@ -23,7 +23,7 @@ type LocksController struct {
Logger logging.SimpleLogging
ApplyLocker locking.ApplyLocker
VCSClient vcs.Client
- LockDetailTemplate templates.TemplateWriter
+ LockDetailTemplate web_templates.TemplateWriter
WorkingDir events.WorkingDir
WorkingDirLocker events.WorkingDirLocker
Backend locking.Backend
@@ -73,12 +73,12 @@ func (l *LocksController) GetLock(w http.ResponseWriter, r *http.Request) {
return
}
if lock == nil {
- l.respond(w, logging.Info, http.StatusNotFound, "No lock found at id %q", idUnencoded)
+ l.respond(w, logging.Info, http.StatusNotFound, "No lock found at id '%s'", idUnencoded)
return
}
owner, repo := models.SplitRepoFullName(lock.Project.RepoFullName)
- viewData := templates.LockDetailData{
+ viewData := web_templates.LockDetailData{
LockKeyEncoded: id,
LockKey: idUnencoded,
PullRequestLink: lock.Pull.URL,
@@ -107,18 +107,18 @@ func (l *LocksController) DeleteLock(w http.ResponseWriter, r *http.Request) {
idUnencoded, err := url.PathUnescape(id)
if err != nil {
- l.respond(w, logging.Warn, http.StatusBadRequest, "Invalid lock id %q. Failed with error: %s", id, err)
+ l.respond(w, logging.Warn, http.StatusBadRequest, "Invalid lock id '%s'. Failed with error: '%s'", id, err)
return
}
- lock, err := l.DeleteLockCommand.DeleteLock(idUnencoded)
+ lock, err := l.DeleteLockCommand.DeleteLock(l.Logger, idUnencoded)
if err != nil {
- l.respond(w, logging.Error, http.StatusInternalServerError, "deleting lock failed with: %s", err)
+ l.respond(w, logging.Error, http.StatusInternalServerError, "deleting lock failed with: '%s'", err)
return
}
if lock == nil {
- l.respond(w, logging.Info, http.StatusNotFound, "No lock found at id %q", idUnencoded)
+ l.respond(w, logging.Info, http.StatusNotFound, "No lock found at id '%s'", idUnencoded)
return
}
@@ -139,7 +139,7 @@ func (l *LocksController) DeleteLock(w http.ResponseWriter, r *http.Request) {
} else {
l.Logger.Debug("skipping commenting on pull request and deleting workspace because BaseRepo field is empty")
}
- l.respond(w, logging.Info, http.StatusOK, "Deleted lock id %q", id)
+ l.respond(w, logging.Info, http.StatusOK, "Deleted lock id '%s'", id)
}
// respond is a helper function to respond and log the response. lvl is the log
diff --git a/server/controllers/locks_controller_test.go b/server/controllers/locks_controller_test.go
index 0f80e7c1f7..d878b34e33 100644
--- a/server/controllers/locks_controller_test.go
+++ b/server/controllers/locks_controller_test.go
@@ -11,8 +11,8 @@ import (
"time"
"github.com/runatlantis/atlantis/server/controllers"
- "github.com/runatlantis/atlantis/server/controllers/templates"
- tMocks "github.com/runatlantis/atlantis/server/controllers/templates/mocks"
+ "github.com/runatlantis/atlantis/server/controllers/web_templates"
+ tMocks "github.com/runatlantis/atlantis/server/controllers/web_templates/mocks"
"github.com/runatlantis/atlantis/server/core/db"
"github.com/runatlantis/atlantis/server/core/locking"
@@ -159,7 +159,7 @@ func TestGetLock_None(t *testing.T) {
req = mux.SetURLVars(req, map[string]string{"id": "id"})
w := httptest.NewRecorder()
lc.GetLock(w, req)
- ResponseContains(t, w, http.StatusNotFound, "No lock found at id \"id\"")
+ ResponseContains(t, w, http.StatusNotFound, "No lock found at id 'id'")
}
func TestGetLock_Success(t *testing.T) {
@@ -185,7 +185,7 @@ func TestGetLock_Success(t *testing.T) {
req = mux.SetURLVars(req, map[string]string{"id": "id"})
w := httptest.NewRecorder()
lc.GetLock(w, req)
- tmpl.VerifyWasCalledOnce().Execute(w, templates.LockDetailData{
+ tmpl.VerifyWasCalledOnce().Execute(w, web_templates.LockDetailData{
LockKeyEncoded: "id",
LockKey: "id",
RepoOwner: "owner",
@@ -215,14 +215,14 @@ func TestDeleteLock_InvalidLockID(t *testing.T) {
req = mux.SetURLVars(req, map[string]string{"id": "%A@"})
w := httptest.NewRecorder()
lc.DeleteLock(w, req)
- ResponseContains(t, w, http.StatusBadRequest, "Invalid lock id \"%A@\"")
+ ResponseContains(t, w, http.StatusBadRequest, "Invalid lock id '%A@'")
}
func TestDeleteLock_LockerErr(t *testing.T) {
t.Log("If there is an error retrieving the lock, a 500 is returned")
RegisterMockTestingT(t)
dlc := mocks2.NewMockDeleteLockCommand()
- When(dlc.DeleteLock("id")).ThenReturn(nil, errors.New("err"))
+ When(dlc.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(nil, errors.New("err"))
lc := controllers.LocksController{
DeleteLockCommand: dlc,
Logger: logging.NewNoopLogger(t),
@@ -238,7 +238,7 @@ func TestDeleteLock_None(t *testing.T) {
t.Log("If there is no lock at that ID we get a 404")
RegisterMockTestingT(t)
dlc := mocks2.NewMockDeleteLockCommand()
- When(dlc.DeleteLock("id")).ThenReturn(nil, nil)
+ When(dlc.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(nil, nil)
lc := controllers.LocksController{
DeleteLockCommand: dlc,
Logger: logging.NewNoopLogger(t),
@@ -247,7 +247,7 @@ func TestDeleteLock_None(t *testing.T) {
req = mux.SetURLVars(req, map[string]string{"id": "id"})
w := httptest.NewRecorder()
lc.DeleteLock(w, req)
- ResponseContains(t, w, http.StatusNotFound, "No lock found at id \"id\"")
+ ResponseContains(t, w, http.StatusNotFound, "No lock found at id 'id'")
}
func TestDeleteLock_OldFormat(t *testing.T) {
@@ -255,7 +255,7 @@ func TestDeleteLock_OldFormat(t *testing.T) {
RegisterMockTestingT(t)
cp := vcsmocks.NewMockClient()
dlc := mocks2.NewMockDeleteLockCommand()
- When(dlc.DeleteLock("id")).ThenReturn(&models.ProjectLock{}, nil)
+ When(dlc.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(&models.ProjectLock{}, nil)
lc := controllers.LocksController{
DeleteLockCommand: dlc,
Logger: logging.NewNoopLogger(t),
@@ -265,7 +265,7 @@ func TestDeleteLock_OldFormat(t *testing.T) {
req = mux.SetURLVars(req, map[string]string{"id": "id"})
w := httptest.NewRecorder()
lc.DeleteLock(w, req)
- ResponseContains(t, w, http.StatusOK, "Deleted lock id \"id\"")
+ ResponseContains(t, w, http.StatusOK, "Deleted lock id 'id'")
cp.VerifyWasCalled(Never()).CreateComment(Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]())
}
@@ -284,7 +284,7 @@ func TestDeleteLock_UpdateProjectStatus(t *testing.T) {
pull := models.PullRequest{
BaseRepo: models.Repo{FullName: repoName},
}
- When(l.DeleteLock("id")).ThenReturn(&models.ProjectLock{
+ When(l.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(&models.ProjectLock{
Pull: pull,
Workspace: workspaceName,
Project: models.Project{
@@ -321,7 +321,7 @@ func TestDeleteLock_UpdateProjectStatus(t *testing.T) {
req = mux.SetURLVars(req, map[string]string{"id": "id"})
w := httptest.NewRecorder()
lc.DeleteLock(w, req)
- ResponseContains(t, w, http.StatusOK, "Deleted lock id \"id\"")
+ ResponseContains(t, w, http.StatusOK, "Deleted lock id 'id'")
status, err := backend.GetPullStatus(pull)
Ok(t, err)
Assert(t, status.Projects != nil, "status projects was nil")
@@ -338,7 +338,7 @@ func TestDeleteLock_CommentFailed(t *testing.T) {
t.Log("If the commenting fails we still return success")
RegisterMockTestingT(t)
dlc := mocks2.NewMockDeleteLockCommand()
- When(dlc.DeleteLock("id")).ThenReturn(&models.ProjectLock{
+ When(dlc.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(&models.ProjectLock{
Pull: models.PullRequest{
BaseRepo: models.Repo{FullName: "owner/repo"},
},
@@ -363,7 +363,7 @@ func TestDeleteLock_CommentFailed(t *testing.T) {
req = mux.SetURLVars(req, map[string]string{"id": "id"})
w := httptest.NewRecorder()
lc.DeleteLock(w, req)
- ResponseContains(t, w, http.StatusOK, "Deleted lock id \"id\"")
+ ResponseContains(t, w, http.StatusOK, "Deleted lock id 'id'")
}
func TestDeleteLock_CommentSuccess(t *testing.T) {
@@ -380,7 +380,7 @@ func TestDeleteLock_CommentSuccess(t *testing.T) {
pull := models.PullRequest{
BaseRepo: models.Repo{FullName: "owner/repo"},
}
- When(dlc.DeleteLock("id")).ThenReturn(&models.ProjectLock{
+ When(dlc.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(&models.ProjectLock{
Pull: pull,
Workspace: "workspace",
Project: models.Project{
@@ -400,7 +400,7 @@ func TestDeleteLock_CommentSuccess(t *testing.T) {
req = mux.SetURLVars(req, map[string]string{"id": "id"})
w := httptest.NewRecorder()
lc.DeleteLock(w, req)
- ResponseContains(t, w, http.StatusOK, "Deleted lock id \"id\"")
+ ResponseContains(t, w, http.StatusOK, "Deleted lock id 'id'")
cp.VerifyWasCalled(Once()).CreateComment(Any[logging.SimpleLogging](), Eq(pull.BaseRepo), Eq(pull.Num),
Eq("**Warning**: The plan for dir: `path` workspace: `workspace` was **discarded** via the Atlantis UI.\n\n"+
"To `apply` this plan you must run `plan` again."), Eq(""))
diff --git a/server/controllers/templates/web_templates.go b/server/controllers/templates/web_templates.go
deleted file mode 100644
index 01bbc2faac..0000000000
--- a/server/controllers/templates/web_templates.go
+++ /dev/null
@@ -1,695 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-
-package templates
-
-import (
- "html/template"
- "io"
- "time"
-
- "github.com/runatlantis/atlantis/server/jobs"
-)
-
-//go:generate pegomock generate --package mocks -o mocks/mock_template_writer.go TemplateWriter
-
-// TemplateWriter is an interface over html/template that's used to enable
-// mocking.
-type TemplateWriter interface {
- // Execute applies a parsed template to the specified data object,
- // writing the output to wr.
- Execute(wr io.Writer, data interface{}) error
-}
-
-// LockIndexData holds the fields needed to display the index view for locks.
-type LockIndexData struct {
- LockPath string
- RepoFullName string
- PullNum int
- Path string
- Workspace string
- LockedBy string
- Time time.Time
- TimeFormatted string
-}
-
-// ApplyLockData holds the fields to display in the index view
-type ApplyLockData struct {
- Locked bool
- GlobalApplyLockEnabled bool
- Time time.Time
- TimeFormatted string
-}
-
-// IndexData holds the data for rendering the index page
-type IndexData struct {
- Locks []LockIndexData
- PullToJobMapping []jobs.PullInfoWithJobIDs
-
- ApplyLock ApplyLockData
- AtlantisVersion string
- // CleanedBasePath is the path Atlantis is accessible at externally. If
- // not using a path-based proxy, this will be an empty string. Never ends
- // in a '/' (hence "cleaned").
- CleanedBasePath string
-}
-
-var IndexTemplate = template.Must(template.New("index.html.tmpl").Parse(`
-[index page markup not recoverable: the HTML inside this raw string was stripped during extraction. The surviving template actions show it rendered the global apply-lock panel ({{ .ApplyLock.Locked }}, {{ .ApplyLock.TimeFormatted }}), the locks list ({{ range .Locks }}), the jobs table ({{ range .PullToJobMapping }} with {{ range .JobIDInfos }}), and the footer version {{ .AtlantisVersion }}.]
-`))
-
-// LockDetailData holds the fields needed to display the lock detail view.
-type LockDetailData struct {
- LockKeyEncoded string
- LockKey string
- RepoOwner string
- RepoName string
- PullRequestLink string
- LockedBy string
- Workspace string
- AtlantisVersion string
- // CleanedBasePath is the path Atlantis is accessible at externally. If
- // not using a path-based proxy, this will be an empty string. Never ends
- // in a '/' (hence "cleaned").
- CleanedBasePath string
-}
-
-var LockTemplate = template.Must(template.New("lock.html.tmpl").Parse(`
-[lock detail markup not recoverable: HTML stripped during extraction. Surviving fields: {{.RepoOwner}}, {{.RepoName}}, the pull request link, {{.LockedBy}}, {{.Workspace}}, a "Discard Plan & Unlock" action, and v{{ .AtlantisVersion }}.]
-`))
-
-// ProjectJobData holds the data needed to stream the current PR information
-type ProjectJobData struct {
- AtlantisVersion string
- ProjectPath string
- CleanedBasePath string
-}
-
-var ProjectJobsTemplate = template.Must(template.New("blank.html.tmpl").Parse(`
-[project jobs markup not recoverable: HTML stripped during extraction; only the "atlantis" page title survives.]
-`))
-
-type ProjectJobsError struct {
- AtlantisVersion string
- ProjectPath string
- CleanedBasePath string
-}
-
-var ProjectJobsErrorTemplate = template.Must(template.New("blank.html.tmpl").Parse(`
-[project jobs error markup not recoverable: HTML stripped during extraction; only the "atlantis" page title survives.]
-`))
-
-// GithubSetupData holds the data for rendering the github app setup page
-type GithubSetupData struct {
- Target string
- Manifest string
- ID int64
- Key string
- WebhookSecret string
- URL string
- CleanedBasePath string
-}
-
-var GithubAppSetupTemplate = template.Must(template.New("github-app.html.tmpl").Parse(`
-[setup page markup not recoverable: HTML stripped during extraction. Surviving text: a {{ if .Target }} branch whose markup was lost; otherwise "Visit {{ .URL }}/installations/new to install the app for your user or organization, then update the following values in your config and restart Atlantis: gh-app-id: {{ .ID }}, gh-app-key-file: {{ .Key }}, gh-webhook-secret: {{ .WebhookSecret }}".]
-`))
diff --git a/server/controllers/templates/mocks/mock_template_writer.go b/server/controllers/web_templates/mocks/mock_template_writer.go
similarity index 100%
rename from server/controllers/templates/mocks/mock_template_writer.go
rename to server/controllers/web_templates/mocks/mock_template_writer.go
diff --git a/server/controllers/web_templates/templates/github-app.html.tmpl b/server/controllers/web_templates/templates/github-app.html.tmpl
new file mode 100644
index 0000000000..34ce01550d
--- /dev/null
+++ b/server/controllers/web_templates/templates/github-app.html.tmpl
@@ -0,0 +1,81 @@
+
+
+
+
+ atlantis
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {{ if .Target }}
+
+ {{ .Manifest }}
+ Setup
+
+ {{ else }}
+ Visit {{ .URL }}/installations/new to install the app for your user or organization, then update the following values in your config and restart Atlantis:
+
+
+ gh-app-id: {{ .ID }}
+ gh-app-key-file: {{ .Key }}
+ gh-webhook-secret: {{ .WebhookSecret }}
+
+ {{ end }}
+
+
+
+
diff --git a/server/controllers/web_templates/templates/index.html.tmpl b/server/controllers/web_templates/templates/index.html.tmpl
new file mode 100644
index 0000000000..b9021f9b61
--- /dev/null
+++ b/server/controllers/web_templates/templates/index.html.tmpl
@@ -0,0 +1,243 @@
+
+
+
+
+ atlantis
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {{ if .ApplyLock.GlobalApplyLockEnabled }}
+ {{ if .ApplyLock.Locked }}
+
+
Apply commands are disabled globally
+
Lock Status
: Active
+
Active Since
: {{ .ApplyLock.TimeFormatted }}
+
Enable Apply Commands
+
+ {{ else }}
+
+ {{ end }}
+ {{ end }}
+
+
+
+
+
+ Locks
+ {{ $basePath := .CleanedBasePath }}
+ {{ if .Locks }}
+
+
+ {{ range .Locks }}
+
+ {{ end }}
+
+ {{ else }}
+ No locks found.
+ {{ end }}
+
+
+
+
+
+ Jobs
+ {{ if .PullToJobMapping }}
+
+
+ {{ range .PullToJobMapping }}
+
+
{{ .Pull.RepoFullName }} #{{ .Pull.PullNum }}
+
{{ if .Pull.Path }}{{ .Pull.Path }}
{{ end }}
+
{{ if .Pull.Workspace }}{{ .Pull.Workspace }}
{{ end }}
+
+ {{ range .JobIDInfos }}
+ {{ .TimeFormatted }}
+ {{ end }}
+
+
+ {{ range .JobIDInfos }}
+
+ {{ end }}
+
+
+ {{ range .JobIDInfos }}
+ {{ .JobDescription }}
+ {{ end }}
+
+
+ {{ end }}
+
+ {{ else }}
+ No jobs found.
+ {{ end }}
+
+
+
+
+
+{{ .AtlantisVersion }}
+
+
+
+
diff --git a/server/controllers/web_templates/templates/lock.html.tmpl b/server/controllers/web_templates/templates/lock.html.tmpl
new file mode 100644
index 0000000000..56bf25a06b
--- /dev/null
+++ b/server/controllers/web_templates/templates/lock.html.tmpl
@@ -0,0 +1,97 @@
+
+
+
+
+ atlantis
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Repo Owner:
{{.RepoOwner}}
+
Repo Name:
{{.RepoName}}
+
Pull Request Link:
+
Locked By:
{{.LockedBy}}
+
Workspace:
{{.Workspace}}
+
+
+ Discard Plan & Unlock
+
+
+
+
+v{{ .AtlantisVersion }}
+
+
+
+
\ No newline at end of file
diff --git a/server/controllers/web_templates/templates/project-jobs-error.html.tmpl b/server/controllers/web_templates/templates/project-jobs-error.html.tmpl
new file mode 100644
index 0000000000..8eead799b7
--- /dev/null
+++ b/server/controllers/web_templates/templates/project-jobs-error.html.tmpl
@@ -0,0 +1,59 @@
+
+
+
+
+ atlantis
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/server/controllers/web_templates/templates/project-jobs.html.tmpl b/server/controllers/web_templates/templates/project-jobs.html.tmpl
new file mode 100644
index 0000000000..aaeb222568
--- /dev/null
+++ b/server/controllers/web_templates/templates/project-jobs.html.tmpl
@@ -0,0 +1,95 @@
+
+
+
+
+ atlantis
+
+
+
+
+
+
+
+
+
+
+
+
+
+ atlantis
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/server/controllers/web_templates/web_templates.go b/server/controllers/web_templates/web_templates.go
new file mode 100644
index 0000000000..0794c80fba
--- /dev/null
+++ b/server/controllers/web_templates/web_templates.go
@@ -0,0 +1,131 @@
+// Copyright 2017 HootSuite Media Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// Modified hereafter by contributors to runatlantis/atlantis.
+
+package web_templates
+
+import (
+ "embed"
+ "html/template"
+ "io"
+ "time"
+
+ "github.com/Masterminds/sprig/v3"
+ "github.com/runatlantis/atlantis/server/jobs"
+)
+
+//go:generate pegomock generate --package mocks -o mocks/mock_template_writer.go TemplateWriter
+
+//go:embed templates/*
+var templatesFS embed.FS
+
+// Read all the templates from the embedded filesystem
+var templates, _ = template.New("").Funcs(sprig.TxtFuncMap()).ParseFS(templatesFS, "templates/*.tmpl")
+
+var templateFileNames = map[string]string{
+ "index": "index.html.tmpl",
+ "lock": "lock.html.tmpl",
+ "project-jobs": "project-jobs.html.tmpl",
+ "project-jobs-error": "project-jobs-error.html.tmpl",
+ "github-app": "github-app.html.tmpl",
+}
+
+// TemplateWriter is an interface over html/template that's used to enable
+// mocking.
+type TemplateWriter interface {
+ // Execute applies a parsed template to the specified data object,
+ // writing the output to wr.
+ Execute(wr io.Writer, data interface{}) error
+}
+
+// LockIndexData holds the fields needed to display the index view for locks.
+type LockIndexData struct {
+ LockPath string
+ RepoFullName string
+ PullNum int
+ Path string
+ Workspace string
+ LockedBy string
+ Time time.Time
+ TimeFormatted string
+}
+
+// ApplyLockData holds the fields to display in the index view
+type ApplyLockData struct {
+ Locked bool
+ GlobalApplyLockEnabled bool
+ Time time.Time
+ TimeFormatted string
+}
+
+// IndexData holds the data for rendering the index page
+type IndexData struct {
+ Locks []LockIndexData
+ PullToJobMapping []jobs.PullInfoWithJobIDs
+
+ ApplyLock ApplyLockData
+ AtlantisVersion string
+ // CleanedBasePath is the path Atlantis is accessible at externally. If
+ // not using a path-based proxy, this will be an empty string. Never ends
+ // in a '/' (hence "cleaned").
+ CleanedBasePath string
+}
+
+var IndexTemplate = templates.Lookup(templateFileNames["index"])
+
+// LockDetailData holds the fields needed to display the lock detail view.
+type LockDetailData struct {
+ LockKeyEncoded string
+ LockKey string
+ RepoOwner string
+ RepoName string
+ PullRequestLink string
+ LockedBy string
+ Workspace string
+ AtlantisVersion string
+ // CleanedBasePath is the path Atlantis is accessible at externally. If
+ // not using a path-based proxy, this will be an empty string. Never ends
+ // in a '/' (hence "cleaned").
+ CleanedBasePath string
+}
+
+var LockTemplate = templates.Lookup(templateFileNames["lock"])
+
+// ProjectJobData holds the data needed to stream the current PR information
+type ProjectJobData struct {
+ AtlantisVersion string
+ ProjectPath string
+ CleanedBasePath string
+}
+
+var ProjectJobsTemplate = templates.Lookup(templateFileNames["project-jobs"])
+
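+// ProjectJobsError holds the data needed to render the project jobs error page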
+type ProjectJobsError struct {
+ AtlantisVersion string
+ ProjectPath string
+ CleanedBasePath string
+}
+
+var ProjectJobsErrorTemplate = templates.Lookup(templateFileNames["project-jobs-error"])
+
+// GithubSetupData holds the data for rendering the github app setup page
+type GithubSetupData struct {
+ Target string
+ Manifest string
+ ID int64
+ Key string
+ WebhookSecret string
+ URL string
+ CleanedBasePath string
+}
+
+var GithubAppSetupTemplate = templates.Lookup(templateFileNames["github-app"])
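The refactor above replaces the inline `template.Must(...Parse(...))` literals with on-disk templates compiled into the binary. A minimal, self-contained sketch of the go:embed pattern it adopts (not Atlantis code; note that because the package discards ParseFS's error and Lookup returns nil for unknown names, a broken template only surfaces at render time, which the rendering tests cover):

```go
package main

import (
	"embed"
	"html/template"
	"os"
)

//go:embed templates/*
var templatesFS embed.FS

// Parse every embedded *.tmpl file once at startup.
var templates = template.Must(template.New("").ParseFS(templatesFS, "templates/*.tmpl"))

func main() {
	// Fetch by file name, mirroring templates.Lookup(templateFileNames["index"]).
	t := templates.Lookup("index.html.tmpl")
	// Hypothetical data shape; the real IndexData carries locks, jobs, and more.
	if err := t.Execute(os.Stdout, map[string]any{"AtlantisVersion": "dev"}); err != nil {
		panic(err)
	}
}
```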
diff --git a/server/controllers/templates/web_templates_test.go b/server/controllers/web_templates/web_templates_test.go
similarity index 99%
rename from server/controllers/templates/web_templates_test.go
rename to server/controllers/web_templates/web_templates_test.go
index 5b88c3e1d9..22fd4e90fe 100644
--- a/server/controllers/templates/web_templates_test.go
+++ b/server/controllers/web_templates/web_templates_test.go
@@ -1,4 +1,4 @@
-package templates
+package web_templates
import (
"io"
diff --git a/server/events/apply_command_runner_test.go b/server/events/apply_command_runner_test.go
index 62c29458fa..43c000801c 100644
--- a/server/events/apply_command_runner_test.go
+++ b/server/events/apply_command_runner_test.go
@@ -261,7 +261,7 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) {
Once(),
},
ExpComment: "Ran Apply for 2 projects:\n\n" +
- "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " +
+ "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n---\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " +
"2. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### Apply Summary\n\n2 projects, 1 successful, 0 failed, 1 errored",
},
{
@@ -346,7 +346,7 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) {
Never(),
},
ExpComment: "Ran Apply for 2 projects:\n\n" +
- "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " +
+ "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n---\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " +
"2. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### Apply Summary\n\n2 projects, 1 successful, 0 failed, 1 errored",
},
{
@@ -399,7 +399,7 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) {
Once(),
},
ExpComment: "Ran Apply for 4 projects:\n\n" +
- "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " +
+ "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n---\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " +
"2. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " +
"3. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### " +
"4. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### Apply Summary\n\n4 projects, 3 successful, 0 failed, 1 errored",
@@ -433,7 +433,7 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) {
Once(),
},
ExpComment: "Ran Apply for 2 projects:\n\n" +
- "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n\n### 1. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### " +
+ "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n---\n\n### 1. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### " +
"2. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### Apply Summary\n\n2 projects, 1 successful, 0 failed, 1 errored",
},
{
@@ -463,7 +463,7 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) {
Once(),
},
ExpComment: "Ran Apply for 2 projects:\n\n" +
- "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n\n### 1. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### " +
+ "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n---\n\n### 1. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### " +
"2. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### Apply Summary\n\n2 projects, 1 successful, 0 failed, 1 errored",
},
}
diff --git a/server/events/command_requirement_handler.go b/server/events/command_requirement_handler.go
index 5c7b1c1d54..bf95a255ce 100644
--- a/server/events/command_requirement_handler.go
+++ b/server/events/command_requirement_handler.go
@@ -33,7 +33,7 @@ func (a *DefaultCommandRequirementHandler) ValidatePlanProject(repoDir string, c
return "Pull request must be mergeable before running plan.", nil
}
case raw.UnDivergedRequirement:
- if a.WorkingDir.HasDiverged(repoDir) {
+ if a.WorkingDir.HasDiverged(ctx.Log, repoDir) {
return "Default branch must be rebased onto pull request before running plan.", nil
}
}
@@ -60,7 +60,7 @@ func (a *DefaultCommandRequirementHandler) ValidateApplyProject(repoDir string,
return "Pull request must be mergeable before running apply.", nil
}
case raw.UnDivergedRequirement:
- if a.WorkingDir.HasDiverged(repoDir) {
+ if a.WorkingDir.HasDiverged(ctx.Log, repoDir) {
return "Default branch must be rebased onto pull request before running apply.", nil
}
}
@@ -95,7 +95,7 @@ func (a *DefaultCommandRequirementHandler) ValidateImportProject(repoDir string,
return "Pull request must be mergeable before running import.", nil
}
case raw.UnDivergedRequirement:
- if a.WorkingDir.HasDiverged(repoDir) {
+ if a.WorkingDir.HasDiverged(ctx.Log, repoDir) {
return "Default branch must be rebased onto pull request before running import.", nil
}
}
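HasDiverged now receives the request logger as its first argument instead of reading a logger stored on the struct. A hedged sketch of a stub satisfying the new WorkingDir shape (the embedded interface and stub names are illustrative):

```go
package main

import (
	"github.com/runatlantis/atlantis/server/events"
	"github.com/runatlantis/atlantis/server/logging"
)

// stubWorkingDir overrides only HasDiverged; the embedded interface
// leaves the remaining WorkingDir methods unimplemented.
type stubWorkingDir struct {
	events.WorkingDir
}

func (s stubWorkingDir) HasDiverged(log logging.SimpleLogging, repoDir string) bool {
	log.Debug("checking whether %s has diverged from the default branch", repoDir)
	return false
}

func main() {
	var _ events.WorkingDir = stubWorkingDir{} // compile-time interface check
}
```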
diff --git a/server/events/command_requirement_handler_test.go b/server/events/command_requirement_handler_test.go
index 1c737f05aa..149e3a608b 100644
--- a/server/events/command_requirement_handler_test.go
+++ b/server/events/command_requirement_handler_test.go
@@ -9,6 +9,7 @@ import (
"github.com/runatlantis/atlantis/server/core/config/valid"
"github.com/runatlantis/atlantis/server/events"
"github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/logging"
"github.com/runatlantis/atlantis/server/events/command"
"github.com/runatlantis/atlantis/server/events/mocks"
@@ -46,7 +47,7 @@ func TestAggregateApplyRequirements_ValidatePlanProject(t *testing.T) {
ProjectPlanStatus: models.PassedPolicyCheckStatus,
},
setup: func(workingDir *mocks.MockWorkingDir) {
- When(workingDir.HasDiverged(Any[string]())).ThenReturn(false)
+ When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(false)
},
wantErr: assert.NoError,
},
@@ -76,7 +77,7 @@ func TestAggregateApplyRequirements_ValidatePlanProject(t *testing.T) {
PlanRequirements: []string{raw.UnDivergedRequirement},
},
setup: func(workingDir *mocks.MockWorkingDir) {
- When(workingDir.HasDiverged(Any[string]())).ThenReturn(true)
+ When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(true)
},
wantFailure: "Default branch must be rebased onto pull request before running plan.",
wantErr: assert.NoError,
@@ -130,7 +131,7 @@ func TestAggregateApplyRequirements_ValidateApplyProject(t *testing.T) {
ProjectPlanStatus: models.PassedPolicyCheckStatus,
},
setup: func(workingDir *mocks.MockWorkingDir) {
- When(workingDir.HasDiverged(Any[string]())).ThenReturn(false)
+ When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(false)
},
wantErr: assert.NoError,
},
@@ -184,7 +185,7 @@ func TestAggregateApplyRequirements_ValidateApplyProject(t *testing.T) {
ApplyRequirements: []string{raw.UnDivergedRequirement},
},
setup: func(workingDir *mocks.MockWorkingDir) {
- When(workingDir.HasDiverged(Any[string]())).ThenReturn(true)
+ When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(true)
},
wantFailure: "Default branch must be rebased onto pull request before running apply.",
wantErr: assert.NoError,
@@ -363,7 +364,7 @@ func TestAggregateApplyRequirements_ValidateImportProject(t *testing.T) {
ProjectPlanStatus: models.PassedPolicyCheckStatus,
},
setup: func(workingDir *mocks.MockWorkingDir) {
- When(workingDir.HasDiverged(Any[string]())).ThenReturn(false)
+ When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(false)
},
wantErr: assert.NoError,
},
@@ -393,7 +394,7 @@ func TestAggregateApplyRequirements_ValidateImportProject(t *testing.T) {
ImportRequirements: []string{raw.UnDivergedRequirement},
},
setup: func(workingDir *mocks.MockWorkingDir) {
- When(workingDir.HasDiverged(Any[string]())).ThenReturn(true)
+ When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(true)
},
wantFailure: "Default branch must be rebased onto pull request before running import.",
wantErr: assert.NoError,
diff --git a/server/events/command_runner.go b/server/events/command_runner.go
index b08690d1ec..14cdbce146 100644
--- a/server/events/command_runner.go
+++ b/server/events/command_runner.go
@@ -24,6 +24,7 @@ import (
"github.com/runatlantis/atlantis/server/events/command"
"github.com/runatlantis/atlantis/server/events/models"
"github.com/runatlantis/atlantis/server/events/vcs"
+ "github.com/runatlantis/atlantis/server/events/vcs/gitea"
"github.com/runatlantis/atlantis/server/logging"
"github.com/runatlantis/atlantis/server/metrics"
"github.com/runatlantis/atlantis/server/recovery"
@@ -97,6 +98,7 @@ type DefaultCommandRunner struct {
GithubPullGetter GithubPullGetter
AzureDevopsPullGetter AzureDevopsPullGetter
GitlabMergeRequestGetter GitlabMergeRequestGetter
+ GiteaPullGetter *gitea.GiteaClient
// User config option: Disables autoplan when a pull request is opened or updated.
DisableAutoplan bool
DisableAutoplanLabel string
@@ -386,6 +388,21 @@ func (c *DefaultCommandRunner) getGithubData(logger logging.SimpleLogging, baseR
return pull, headRepo, nil
}
+func (c *DefaultCommandRunner) getGiteaData(logger logging.SimpleLogging, baseRepo models.Repo, pullNum int) (models.PullRequest, models.Repo, error) {
+ if c.GiteaPullGetter == nil {
+ return models.PullRequest{}, models.Repo{}, errors.New("Atlantis not configured to support Gitea")
+ }
+ giteaPull, err := c.GiteaPullGetter.GetPullRequest(logger, baseRepo, pullNum)
+ if err != nil {
+ return models.PullRequest{}, models.Repo{}, errors.Wrap(err, "making pull request API call to Gitea")
+ }
+ pull, _, headRepo, err := c.EventParser.ParseGiteaPull(giteaPull)
+ if err != nil {
+ return pull, headRepo, errors.Wrap(err, "extracting required fields from comment data")
+ }
+ return pull, headRepo, nil
+}
+
func (c *DefaultCommandRunner) getGitlabData(logger logging.SimpleLogging, baseRepo models.Repo, pullNum int) (models.PullRequest, error) {
if c.GitlabMergeRequestGetter == nil {
return models.PullRequest{}, errors.New("Atlantis not configured to support GitLab")
@@ -446,6 +463,8 @@ func (c *DefaultCommandRunner) ensureValidRepoMetadata(
pull = *maybePull
case models.AzureDevops:
pull, headRepo, err = c.getAzureDevopsData(log, baseRepo, pullNum)
+ case models.Gitea:
+ pull, headRepo, err = c.getGiteaData(log, baseRepo, pullNum)
default:
err = errors.New("Unknown VCS type–this is a bug")
}
diff --git a/server/events/command_runner_test.go b/server/events/command_runner_test.go
index 1b5c77f461..8acea27b98 100644
--- a/server/events/command_runner_test.go
+++ b/server/events/command_runner_test.go
@@ -666,12 +666,16 @@ func TestRunUnlockCommand_VCSComment(t *testing.T) {
State: tc.prState,
}
modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num}
- When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil)
- When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil)
+ When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo),
+ Eq(testdata.Pull.Num))).ThenReturn(pull, nil)
+ When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo,
+ testdata.GithubRepo, nil)
- ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Unlock})
+ ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num,
+ &events.CommentCommand{Name: command.Unlock})
- deleteLockCommand.VerifyWasCalledOnce().DeleteLocksByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)
+ deleteLockCommand.VerifyWasCalledOnce().DeleteLocksByPull(Any[logging.SimpleLogging](),
+ Eq(testdata.GithubRepo.FullName), Eq(testdata.Pull.Num))
vcsClient.VerifyWasCalledOnce().CreateComment(
Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num),
Eq("All Atlantis locks for this PR have been unlocked and plans discarded"), Eq("unlock"))
@@ -688,11 +692,15 @@ func TestRunUnlockCommandFail_VCSComment(t *testing.T) {
State: github.String("open"),
}
modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num}
- When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil)
- When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil)
- When(deleteLockCommand.DeleteLocksByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)).ThenReturn(0, errors.New("err"))
+ When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo),
+ Eq(testdata.Pull.Num))).ThenReturn(pull, nil)
+ When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo,
+ testdata.GithubRepo, nil)
+ When(deleteLockCommand.DeleteLocksByPull(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo.FullName),
+ Eq(testdata.Pull.Num))).ThenReturn(0, errors.New("err"))
- ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Unlock})
+ ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num,
+ &events.CommentCommand{Name: command.Unlock})
vcsClient.VerifyWasCalledOnce().CreateComment(
Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num), Eq("Failed to delete PR locks"), Eq("unlock"))
@@ -708,15 +716,20 @@ func TestRunUnlockCommandFail_DisableUnlockLabel(t *testing.T) {
State: github.String("open"),
}
modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num}
- When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil)
- When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil)
- When(deleteLockCommand.DeleteLocksByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)).ThenReturn(0, errors.New("err"))
- When(ch.VCSClient.GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull))).ThenReturn([]string{doNotUnlock, "need-help"}, nil)
-
- ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Unlock})
-
- vcsClient.VerifyWasCalledOnce().CreateComment(
- Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num), Eq("Not allowed to unlock PR with "+doNotUnlock+" label"), Eq("unlock"))
+ When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo),
+ Eq(testdata.Pull.Num))).ThenReturn(pull, nil)
+ When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo,
+ testdata.GithubRepo, nil)
+ When(deleteLockCommand.DeleteLocksByPull(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo.FullName),
+ Eq(testdata.Pull.Num))).ThenReturn(0, errors.New("err"))
+ When(ch.VCSClient.GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo),
+ Eq(modelPull))).ThenReturn([]string{doNotUnlock, "need-help"}, nil)
+
+ ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num,
+ &events.CommentCommand{Name: command.Unlock})
+
+ vcsClient.VerifyWasCalledOnce().CreateComment(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo),
+ Eq(testdata.Pull.Num), Eq("Not allowed to unlock PR with "+doNotUnlock+" label"), Eq("unlock"))
}
func TestRunUnlockCommandFail_GetLabelsFail(t *testing.T) {
@@ -727,15 +740,20 @@ func TestRunUnlockCommandFail_GetLabelsFail(t *testing.T) {
State: github.String("open"),
}
modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num}
- When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil)
- When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil)
- When(deleteLockCommand.DeleteLocksByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)).ThenReturn(0, errors.New("err"))
- When(ch.VCSClient.GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull))).ThenReturn(nil, errors.New("err"))
-
- ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Unlock})
-
- vcsClient.VerifyWasCalledOnce().CreateComment(
- Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num), Eq("Failed to retrieve PR labels... Not unlocking"), Eq("unlock"))
+ When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo),
+ Eq(testdata.Pull.Num))).ThenReturn(pull, nil)
+ When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo,
+ testdata.GithubRepo, nil)
+ When(deleteLockCommand.DeleteLocksByPull(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo.FullName),
+ Eq(testdata.Pull.Num))).ThenReturn(0, errors.New("err"))
+ When(ch.VCSClient.GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo),
+ Eq(modelPull))).ThenReturn(nil, errors.New("err"))
+
+ ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num,
+ &events.CommentCommand{Name: command.Unlock})
+
+ vcsClient.VerifyWasCalledOnce().CreateComment(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num),
+ Eq("Failed to retrieve PR labels... Not unlocking"), Eq("unlock"))
}
func TestRunUnlockCommandDoesntRetrieveLabelsIfDisableUnlockLabelNotSet(t *testing.T) {
@@ -748,13 +766,18 @@ func TestRunUnlockCommandDoesntRetrieveLabelsIfDisableUnlockLabelNotSet(t *testi
State: github.String("open"),
}
modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num}
- When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil)
- When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil)
- When(deleteLockCommand.DeleteLocksByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)).ThenReturn(0, errors.New("err"))
- When(ch.VCSClient.GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull))).ThenReturn([]string{doNotUnlock, "need-help"}, nil)
+ When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo),
+ Eq(testdata.Pull.Num))).ThenReturn(pull, nil)
+ When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo,
+ testdata.GithubRepo, nil)
+ When(deleteLockCommand.DeleteLocksByPull(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo.FullName),
+ Eq(testdata.Pull.Num))).ThenReturn(0, errors.New("err"))
+ When(ch.VCSClient.GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo),
+ Eq(modelPull))).ThenReturn([]string{doNotUnlock, "need-help"}, nil)
unlockCommandRunner.DisableUnlockLabel = ""
- ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Unlock})
+ ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num,
+ &events.CommentCommand{Name: command.Unlock})
vcsClient.VerifyWasCalled(Never()).GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull))
}
diff --git a/server/events/comment_parser.go b/server/events/comment_parser.go
index c4ec87bb6d..3b3d2d3b0a 100644
--- a/server/events/comment_parser.go
+++ b/server/events/comment_parser.go
@@ -79,6 +79,7 @@ type CommentBuilder interface {
type CommentParser struct {
GithubUser string
GitlabUser string
+ GiteaUser string
BitbucketUser string
AzureDevopsUser string
ExecutableName string
@@ -86,7 +87,7 @@ type CommentParser struct {
}
// NewCommentParser returns a CommentParser
-func NewCommentParser(githubUser, gitlabUser, bitbucketUser, azureDevopsUser, executableName string, allowCommands []command.Name) *CommentParser {
+func NewCommentParser(githubUser, gitlabUser, giteaUser, bitbucketUser, azureDevopsUser, executableName string, allowCommands []command.Name) *CommentParser {
var commentAllowCommands []command.Name
for _, acceptableCommand := range command.AllCommentCommands {
for _, allowCommand := range allowCommands {
@@ -100,6 +101,7 @@ func NewCommentParser(githubUser, gitlabUser, bitbucketUser, azureDevopsUser, ex
return &CommentParser{
GithubUser: githubUser,
GitlabUser: gitlabUser,
+ GiteaUser: giteaUser,
BitbucketUser: bitbucketUser,
AzureDevopsUser: azureDevopsUser,
ExecutableName: executableName,
@@ -174,6 +176,8 @@ func (e *CommentParser) Parse(rawComment string, vcsHost models.VCSHostType) Com
vcsUser = e.GithubUser
case models.Gitlab:
vcsUser = e.GitlabUser
+ case models.Gitea:
+ vcsUser = e.GiteaUser
case models.BitbucketCloud, models.BitbucketServer:
vcsUser = e.BitbucketUser
case models.AzureDevops:
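With GiteaUser threaded through, comments on Gitea pull requests are matched against the configured bot account like the other hosts. A sketch of the new constructor order (giteaUser is now the third argument; the printed result shape is hedged):

```go
package main

import (
	"fmt"

	"github.com/runatlantis/atlantis/server/events"
	"github.com/runatlantis/atlantis/server/events/command"
	"github.com/runatlantis/atlantis/server/events/models"
)

func main() {
	parser := events.NewCommentParser(
		"github-user",
		"gitlab-user",
		"gitea-user", // new: mentions of @gitea-user are treated as bot mentions
		"bitbucket-user",
		"azure-devops-user",
		"atlantis",
		command.AllCommentCommands,
	)
	// Comments originating from Gitea now resolve vcsUser to GiteaUser.
	result := parser.Parse("atlantis plan -d dir", models.Gitea)
	fmt.Printf("%+v\n", result)
}
```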
diff --git a/server/events/comment_parser_test.go b/server/events/comment_parser_test.go
index 9c4b19d4f5..45c22e7e5f 100644
--- a/server/events/comment_parser_test.go
+++ b/server/events/comment_parser_test.go
@@ -28,6 +28,7 @@ import (
var commentParser = events.CommentParser{
GithubUser: "github-user",
GitlabUser: "gitlab-user",
+ GiteaUser: "gitea-user",
ExecutableName: "atlantis",
AllowCommands: command.AllCommentCommands,
}
@@ -36,6 +37,7 @@ func TestNewCommentParser(t *testing.T) {
type args struct {
githubUser string
gitlabUser string
+ giteaUser string
bitbucketUser string
azureDevopsUser string
executableName string
@@ -68,7 +70,7 @@ func TestNewCommentParser(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- assert.Equalf(t, tt.want, events.NewCommentParser(tt.args.githubUser, tt.args.gitlabUser, tt.args.bitbucketUser, tt.args.azureDevopsUser, tt.args.executableName, tt.args.allowCommands), "NewCommentParser(%v, %v, %v, %v, %v, %v)", tt.args.githubUser, tt.args.gitlabUser, tt.args.bitbucketUser, tt.args.azureDevopsUser, tt.args.executableName, tt.args.allowCommands)
+ assert.Equalf(t, tt.want, events.NewCommentParser(tt.args.githubUser, tt.args.gitlabUser, tt.args.giteaUser, tt.args.bitbucketUser, tt.args.azureDevopsUser, tt.args.executableName, tt.args.allowCommands), "NewCommentParser(%v, %v, %v, %v, %v, %v, %v)", tt.args.githubUser, tt.args.gitlabUser, tt.args.giteaUser, tt.args.bitbucketUser, tt.args.azureDevopsUser, tt.args.executableName, tt.args.allowCommands)
})
}
}
@@ -266,6 +268,7 @@ func TestParse_InvalidCommand(t *testing.T) {
cp := events.NewCommentParser(
"github-user",
"gitlab-user",
+ "gitea-user",
"bitbucket-user",
"azure-devops-user",
"atlantis",
diff --git a/server/events/delete_lock_command.go b/server/events/delete_lock_command.go
index 89016503fb..1c9abcdda0 100644
--- a/server/events/delete_lock_command.go
+++ b/server/events/delete_lock_command.go
@@ -6,25 +6,24 @@ import (
"github.com/runatlantis/atlantis/server/logging"
)
-//go:generate pegomock generate --package mocks -o mocks/mock_delete_lock_command.go DeleteLockCommand
+//go:generate pegomock generate github.com/runatlantis/atlantis/server/events --package mocks -o mocks/mock_delete_lock_command.go DeleteLockCommand
// DeleteLockCommand is the first step after a command request has been parsed.
type DeleteLockCommand interface {
- DeleteLock(id string) (*models.ProjectLock, error)
- DeleteLocksByPull(repoFullName string, pullNum int) (int, error)
+ DeleteLock(logger logging.SimpleLogging, id string) (*models.ProjectLock, error)
+ DeleteLocksByPull(logger logging.SimpleLogging, repoFullName string, pullNum int) (int, error)
}
// DefaultDeleteLockCommand deletes a specific lock after a request from the LocksController.
type DefaultDeleteLockCommand struct {
Locker locking.Locker
- Logger logging.SimpleLogging
WorkingDir WorkingDir
WorkingDirLocker WorkingDirLocker
Backend locking.Backend
}
// DeleteLock handles deleting the lock at id
-func (l *DefaultDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, error) {
+func (l *DefaultDeleteLockCommand) DeleteLock(logger logging.SimpleLogging, id string) (*models.ProjectLock, error) {
lock, err := l.Locker.Unlock(id)
if err != nil {
return nil, err
@@ -33,9 +32,9 @@ func (l *DefaultDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, e
return nil, nil
}
- removeErr := l.WorkingDir.DeletePlan(lock.Pull.BaseRepo, lock.Pull, lock.Workspace, lock.Project.Path, lock.Project.ProjectName)
+ removeErr := l.WorkingDir.DeletePlan(logger, lock.Pull.BaseRepo, lock.Pull, lock.Workspace, lock.Project.Path, lock.Project.ProjectName)
if removeErr != nil {
- l.Logger.Warn("Failed to delete plan: %s", removeErr)
+ logger.Warn("Failed to delete plan: %s", removeErr)
return nil, removeErr
}
@@ -43,23 +42,23 @@ func (l *DefaultDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, e
}
// DeleteLocksByPull handles deleting all locks for the pull request
-func (l *DefaultDeleteLockCommand) DeleteLocksByPull(repoFullName string, pullNum int) (int, error) {
+func (l *DefaultDeleteLockCommand) DeleteLocksByPull(logger logging.SimpleLogging, repoFullName string, pullNum int) (int, error) {
locks, err := l.Locker.UnlockByPull(repoFullName, pullNum)
numLocks := len(locks)
if err != nil {
return numLocks, err
}
if numLocks == 0 {
- l.Logger.Debug("No locks found for repo '%v', pull request: %v", repoFullName, pullNum)
+ logger.Debug("No locks found for repo '%v', pull request: %v", repoFullName, pullNum)
return numLocks, nil
}
for i := 0; i < numLocks; i++ {
lock := locks[i]
- err := l.WorkingDir.DeletePlan(lock.Pull.BaseRepo, lock.Pull, lock.Workspace, lock.Project.Path, lock.Project.ProjectName)
+ err := l.WorkingDir.DeletePlan(logger, lock.Pull.BaseRepo, lock.Pull, lock.Workspace, lock.Project.Path, lock.Project.ProjectName)
if err != nil {
- l.Logger.Warn("Failed to delete plan: %s", err)
+ logger.Warn("Failed to delete plan: %s", err)
return numLocks, err
}
}
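Since DefaultDeleteLockCommand no longer carries a Logger field, every caller passes the request-scoped logger explicitly. A sketch of the new call sites (the lock ID and pull number are illustrative):

```go
package main

import (
	"github.com/runatlantis/atlantis/server/core/locking"
	"github.com/runatlantis/atlantis/server/events"
	"github.com/runatlantis/atlantis/server/logging"
)

func deleteLocks(logger logging.SimpleLogging, locker locking.Locker,
	workingDir events.WorkingDir, workingDirLocker events.WorkingDirLocker,
	backend locking.Backend) error {
	dlc := events.DefaultDeleteLockCommand{
		Locker:           locker,
		WorkingDir:       workingDir,
		WorkingDirLocker: workingDirLocker,
		Backend:          backend,
	}
	// Each call now logs with the caller's context rather than a shared logger.
	if _, err := dlc.DeleteLock(logger, "owner/repo/path/workspace"); err != nil {
		return err
	}
	_, err := dlc.DeleteLocksByPull(logger, "owner/repo", 42)
	return err
}

func main() {}
```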
diff --git a/server/events/delete_lock_command_test.go b/server/events/delete_lock_command_test.go
index 75ffe0488b..2e652770b9 100644
--- a/server/events/delete_lock_command_test.go
+++ b/server/events/delete_lock_command_test.go
@@ -15,33 +15,30 @@ import (
func TestDeleteLock_LockerErr(t *testing.T) {
t.Log("If there is an error retrieving the lock, we return the error")
+ logger := logging.NewNoopLogger(t)
RegisterMockTestingT(t)
l := lockmocks.NewMockLocker()
When(l.Unlock("id")).ThenReturn(nil, errors.New("err"))
- dlc := events.DefaultDeleteLockCommand{
- Locker: l,
- Logger: logging.NewNoopLogger(t),
- }
- _, err := dlc.DeleteLock("id")
+ dlc := events.DefaultDeleteLockCommand{Locker: l}
+ _, err := dlc.DeleteLock(logger, "id")
ErrEquals(t, "err", err)
}
func TestDeleteLock_None(t *testing.T) {
t.Log("If there is no lock at that ID we return nil")
+ logger := logging.NewNoopLogger(t)
RegisterMockTestingT(t)
l := lockmocks.NewMockLocker()
When(l.Unlock("id")).ThenReturn(nil, nil)
- dlc := events.DefaultDeleteLockCommand{
- Locker: l,
- Logger: logging.NewNoopLogger(t),
- }
- lock, err := dlc.DeleteLock("id")
+ dlc := events.DefaultDeleteLockCommand{Locker: l}
+ lock, err := dlc.DeleteLock(logger, "id")
Ok(t, err)
Assert(t, lock == nil, "lock was not nil")
}
func TestDeleteLock_Success(t *testing.T) {
t.Log("Delete lock deletes successfully the plan file")
+ logger := logging.NewNoopLogger(t)
RegisterMockTestingT(t)
l := lockmocks.NewMockLocker()
When(l.Unlock("id")).ThenReturn(&models.ProjectLock{}, nil)
@@ -66,19 +63,20 @@ func TestDeleteLock_Success(t *testing.T) {
Ok(t, err)
dlc := events.DefaultDeleteLockCommand{
Locker: l,
- Logger: logging.NewNoopLogger(t),
Backend: db,
WorkingDirLocker: workingDirLocker,
WorkingDir: workingDir,
}
- lock, err := dlc.DeleteLock("id")
+ lock, err := dlc.DeleteLock(logger, "id")
Ok(t, err)
Assert(t, lock != nil, "lock was nil")
- workingDir.VerifyWasCalledOnce().DeletePlan(pull.BaseRepo, pull, workspace, path, projectName)
+ workingDir.VerifyWasCalledOnce().DeletePlan(Any[logging.SimpleLogging](), Eq(pull.BaseRepo), Eq(pull), Eq(workspace),
+ Eq(path), Eq(projectName))
}
func TestDeleteLocksByPull_LockerErr(t *testing.T) {
t.Log("If there is an error retrieving the lock, returned a failed status")
+ logger := logging.NewNoopLogger(t)
repoName := "reponame"
pullNum := 2
RegisterMockTestingT(t)
@@ -87,16 +85,17 @@ func TestDeleteLocksByPull_LockerErr(t *testing.T) {
When(l.UnlockByPull(repoName, pullNum)).ThenReturn(nil, errors.New("err"))
dlc := events.DefaultDeleteLockCommand{
Locker: l,
- Logger: logging.NewNoopLogger(t),
WorkingDir: workingDir,
}
- _, err := dlc.DeleteLocksByPull(repoName, pullNum)
+ _, err := dlc.DeleteLocksByPull(logger, repoName, pullNum)
ErrEquals(t, "err", err)
- workingDir.VerifyWasCalled(Never()).DeletePlan(Any[models.Repo](), Any[models.PullRequest](), Any[string](), Any[string](), Any[string]())
+ workingDir.VerifyWasCalled(Never()).DeletePlan(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string](), Any[string](), Any[string]())
}
func TestDeleteLocksByPull_None(t *testing.T) {
t.Log("If there is no lock at that ID there is no error")
+ logger := logging.NewNoopLogger(t)
repoName := "reponame"
pullNum := 2
RegisterMockTestingT(t)
@@ -105,16 +104,17 @@ func TestDeleteLocksByPull_None(t *testing.T) {
When(l.UnlockByPull(repoName, pullNum)).ThenReturn([]models.ProjectLock{}, nil)
dlc := events.DefaultDeleteLockCommand{
Locker: l,
- Logger: logging.NewNoopLogger(t),
WorkingDir: workingDir,
}
- _, err := dlc.DeleteLocksByPull(repoName, pullNum)
+ _, err := dlc.DeleteLocksByPull(logger, repoName, pullNum)
Ok(t, err)
- workingDir.VerifyWasCalled(Never()).DeletePlan(Any[models.Repo](), Any[models.PullRequest](), Any[string](), Any[string](), Any[string]())
+ workingDir.VerifyWasCalled(Never()).DeletePlan(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string](), Any[string](), Any[string]())
}
func TestDeleteLocksByPull_SingleSuccess(t *testing.T) {
t.Log("If a single lock is successfully deleted")
+ logger := logging.NewNoopLogger(t)
repoName := "reponame"
pullNum := 2
path := "."
@@ -142,16 +142,17 @@ func TestDeleteLocksByPull_SingleSuccess(t *testing.T) {
)
dlc := events.DefaultDeleteLockCommand{
Locker: l,
- Logger: logging.NewNoopLogger(t),
WorkingDir: workingDir,
}
- _, err := dlc.DeleteLocksByPull(repoName, pullNum)
+ _, err := dlc.DeleteLocksByPull(logger, repoName, pullNum)
Ok(t, err)
- workingDir.VerifyWasCalled(Once()).DeletePlan(pull.BaseRepo, pull, workspace, path, projectName)
+ workingDir.VerifyWasCalled(Once()).DeletePlan(Any[logging.SimpleLogging](), Eq(pull.BaseRepo), Eq(pull), Eq(workspace),
+ Eq(path), Eq(projectName))
}
func TestDeleteLocksByPull_MultipleSuccess(t *testing.T) {
t.Log("If multiple locks are successfully deleted")
+ logger := logging.NewNoopLogger(t)
repoName := "reponame"
pullNum := 2
path1 := "path1"
@@ -187,11 +188,10 @@ func TestDeleteLocksByPull_MultipleSuccess(t *testing.T) {
)
dlc := events.DefaultDeleteLockCommand{
Locker: l,
- Logger: logging.NewNoopLogger(t),
WorkingDir: workingDir,
}
- _, err := dlc.DeleteLocksByPull(repoName, pullNum)
+ _, err := dlc.DeleteLocksByPull(logger, repoName, pullNum)
Ok(t, err)
- workingDir.VerifyWasCalled(Once()).DeletePlan(pull.BaseRepo, pull, workspace, path1, projectName)
- workingDir.VerifyWasCalled(Once()).DeletePlan(pull.BaseRepo, pull, workspace, path2, projectName)
+ workingDir.VerifyWasCalled(Once()).DeletePlan(logger, pull.BaseRepo, pull, workspace, path1, projectName)
+ workingDir.VerifyWasCalled(Once()).DeletePlan(logger, pull.BaseRepo, pull, workspace, path2, projectName)
}
diff --git a/server/events/event_parser.go b/server/events/event_parser.go
index 988d051f27..54abcebb26 100644
--- a/server/events/event_parser.go
+++ b/server/events/event_parser.go
@@ -20,6 +20,8 @@ import (
"path"
"strings"
+ giteasdk "code.gitea.io/sdk/gitea"
+
"github.com/go-playground/validator/v10"
"github.com/google/go-github/v59/github"
lru "github.com/hashicorp/golang-lru/v2"
@@ -29,6 +31,7 @@ import (
"github.com/runatlantis/atlantis/server/events/models"
"github.com/runatlantis/atlantis/server/events/vcs/bitbucketcloud"
"github.com/runatlantis/atlantis/server/events/vcs/bitbucketserver"
+ "github.com/runatlantis/atlantis/server/events/vcs/gitea"
"github.com/runatlantis/atlantis/server/logging"
"github.com/xanzy/go-gitlab"
)
@@ -337,6 +340,14 @@ type EventParsing interface {
// ParseAzureDevopsRepo parses the response from the Azure DevOps API endpoint that
// returns a repo into the Atlantis model.
ParseAzureDevopsRepo(adRepo *azuredevops.GitRepository) (models.Repo, error)
+
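+ // ParseGiteaPullRequestEvent parses a Gitea pull request webhook event into the pull request, event type, base and head repos, and acting user.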
+ ParseGiteaPullRequestEvent(event giteasdk.PullRequest) (
+ pull models.PullRequest, pullEventType models.PullRequestEventType,
+ baseRepo models.Repo, headRepo models.Repo, user models.User, err error)
+
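+ // ParseGiteaIssueCommentEvent parses a Gitea issue comment webhook payload into the base repo, the commenting user, and the pull request number.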
+ ParseGiteaIssueCommentEvent(event gitea.GiteaIssueCommentPayload) (baseRepo models.Repo, user models.User, pullNum int, err error)
+
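+ // ParseGiteaPull parses a pull request returned from the Gitea API (not from a webhook). See EventParsing for return value docs.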
+ ParseGiteaPull(pull *giteasdk.PullRequest) (pullModel models.PullRequest, baseRepo models.Repo, headRepo models.Repo, err error)
}
// EventParser parses VCS events.
@@ -345,6 +356,8 @@ type EventParser struct {
GithubToken string
GitlabUser string
GitlabToken string
+ GiteaUser string
+ GiteaToken string
AllowDraftPRs bool
BitbucketUser string
BitbucketToken string
@@ -357,6 +370,8 @@ func (e *EventParser) ParseAPIPlanRequest(vcsHostType models.VCSHostType, repoFu
switch vcsHostType {
case models.Github:
return models.NewRepo(vcsHostType, repoFullName, cloneURL, e.GithubUser, e.GithubToken)
+ case models.Gitea:
+ return models.NewRepo(vcsHostType, repoFullName, cloneURL, e.GiteaUser, e.GiteaToken)
case models.Gitlab:
return models.NewRepo(vcsHostType, repoFullName, cloneURL, e.GitlabUser, e.GitlabToken)
}
@@ -611,6 +626,13 @@ func (e *EventParser) ParseGithubRepo(ghRepo *github.Repository) (models.Repo, e
return models.NewRepo(models.Github, ghRepo.GetFullName(), ghRepo.GetCloneURL(), e.GithubUser, e.GithubToken)
}
+// ParseGiteaRepo parses the response from the Gitea API endpoint that
+// returns a repo into the Atlantis model.
+// See EventParsing for return value docs.
+func (e *EventParser) ParseGiteaRepo(repo giteasdk.Repository) (models.Repo, error) {
+ return models.NewRepo(models.Gitea, repo.FullName, repo.CloneURL, e.GiteaUser, e.GiteaToken)
+}
+
// ParseGitlabMergeRequestUpdateEvent dives deeper into Gitlab merge request update events
func (e *EventParser) ParseGitlabMergeRequestUpdateEvent(event gitlab.MergeEvent) models.PullRequestEventType {
// New commit to opened MR
@@ -703,6 +725,27 @@ func (e *EventParser) ParseGitlabMergeRequestCommentEvent(event gitlab.MergeComm
return
}
+func (e *EventParser) ParseGiteaIssueCommentEvent(comment gitea.GiteaIssueCommentPayload) (baseRepo models.Repo, user models.User, pullNum int, err error) {
+ baseRepo, err = e.ParseGiteaRepo(comment.Repository)
+ if err != nil {
+ return
+ }
+ if comment.Comment.Body == "" || comment.Comment.Poster.UserName == "" {
+ err = errors.New("comment.user.login is null")
+ return
+ }
+ commenterUsername := comment.Comment.Poster.UserName
+ user = models.User{
+ Username: commenterUsername,
+ }
+ pullNum = int(comment.Issue.Index)
+ if pullNum == 0 {
+ err = errors.New("issue.number is null")
+ return
+ }
+ return
+}
+
// ParseGitlabMergeRequest parses the merge requests and returns a pull request
// model. We require passing in baseRepo because we can't get this information
// from the merge request. The only caller of this function already has that
@@ -989,3 +1032,121 @@ func (e *EventParser) ParseAzureDevopsRepo(adRepo *azuredevops.GitRepository) (m
fullName := fmt.Sprintf("%s/%s/%s", owner, project, repo)
return models.NewRepo(models.AzureDevops, fullName, cloneURL, e.AzureDevopsUser, e.AzureDevopsToken)
}
+
+func (e *EventParser) ParseGiteaPullRequestEvent(event giteasdk.PullRequest) (models.PullRequest, models.PullRequestEventType, models.Repo, models.Repo, models.User, error) {
+ var pullEventType models.PullRequestEventType
+
+ // Determine the event type based on the state of the pull request and whether it's merged.
+ switch {
+ case event.State == giteasdk.StateOpen:
+ pullEventType = models.OpenedPullEvent
+ case event.HasMerged:
+ pullEventType = models.ClosedPullEvent
+ default:
+ pullEventType = models.OtherPullEvent
+ }
+
+ // Parse the base repository.
+ baseRepo, err := models.NewRepo(
+ models.Gitea,
+ event.Base.Repository.FullName,
+ event.Base.Repository.CloneURL,
+ e.GiteaUser,
+ e.GiteaToken,
+ )
+ if err != nil {
+ return models.PullRequest{}, models.OtherPullEvent, models.Repo{}, models.Repo{}, models.User{}, err
+ }
+
+ // Parse the head repository.
+ headRepo, err := models.NewRepo(
+ models.Gitea,
+ event.Head.Repository.FullName,
+ event.Head.Repository.CloneURL,
+ e.GiteaUser,
+ e.GiteaToken,
+ )
+ if err != nil {
+ return models.PullRequest{}, models.OtherPullEvent, models.Repo{}, models.Repo{}, models.User{}, err
+ }
+
+ // Construct the pull request model.
+ pull := models.PullRequest{
+ Num: int(event.Index),
+ URL: event.HTMLURL,
+ HeadCommit: event.Head.Sha,
+ HeadBranch: event.Head.Ref,
+ BaseBranch: event.Base.Ref,
+ Author: event.Poster.UserName,
+ BaseRepo: baseRepo,
+ }
+
+ // Parse the user who made the pull request.
+ user := models.User{
+ Username: event.Poster.UserName,
+ }
+ return pull, pullEventType, baseRepo, headRepo, user, nil
+}
+
+// ParseGiteaPull parses the response from the Gitea API endpoint (not
+// from a webhook) that returns a pull request.
+// See EventParsing for return value docs.
+func (e *EventParser) ParseGiteaPull(pull *giteasdk.PullRequest) (pullModel models.PullRequest, baseRepo models.Repo, headRepo models.Repo, err error) {
+ commit := pull.Head.Sha
+ if commit == "" {
+ err = errors.New("head.sha is null")
+ return
+ }
+ url := pull.HTMLURL
+ if url == "" {
+ err = errors.New("html_url is null")
+ return
+ }
+ headBranch := pull.Head.Ref
+ if headBranch == "" {
+ err = errors.New("head.ref is null")
+ return
+ }
+ baseBranch := pull.Base.Ref
+ if baseBranch == "" {
+ err = errors.New("base.ref is null")
+ return
+ }
+
+ authorUsername := pull.Poster.UserName
+ if authorUsername == "" {
+ err = errors.New("user.login is null")
+ return
+ }
+ num := pull.Index
+ if num == 0 {
+ err = errors.New("number is null")
+ return
+ }
+
+ baseRepo, err = e.ParseGiteaRepo(*pull.Base.Repository)
+ if err != nil {
+ return
+ }
+ headRepo, err = e.ParseGiteaRepo(*pull.Head.Repository)
+ if err != nil {
+ return
+ }
+
+ pullState := models.ClosedPullState
+ if pull.State == "open" {
+ pullState = models.OpenPullState
+ }
+
+ pullModel = models.PullRequest{
+ Author: authorUsername,
+ HeadBranch: headBranch,
+ HeadCommit: commit,
+ URL: url,
+ Num: int(num),
+ State: pullState,
+ BaseRepo: baseRepo,
+ BaseBranch: baseBranch,
+ }
+ return
+}
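A sketch of ParseGiteaPull in use with a minimal Gitea SDK payload that passes every null check above (values are illustrative; SDK field names follow the parser's usage):

```go
package main

import (
	"fmt"

	giteasdk "code.gitea.io/sdk/gitea"
	"github.com/runatlantis/atlantis/server/events"
)

func main() {
	parser := &events.EventParser{GiteaUser: "atlantis", GiteaToken: "token"}

	repo := &giteasdk.Repository{
		FullName: "owner/repo",
		CloneURL: "https://gitea.example.com/owner/repo.git",
	}
	pr := &giteasdk.PullRequest{
		Index:   42,
		HTMLURL: "https://gitea.example.com/owner/repo/pulls/42",
		State:   giteasdk.StateOpen,
		Poster:  &giteasdk.User{UserName: "contributor"},
		Head:    &giteasdk.PRBranchInfo{Ref: "feature", Sha: "0123abcd", Repository: repo},
		Base:    &giteasdk.PRBranchInfo{Ref: "main", Repository: repo},
	}

	pull, baseRepo, headRepo, err := parser.ParseGiteaPull(pr)
	if err != nil {
		panic(err)
	}
	fmt.Println(pull.Num, baseRepo.FullName, headRepo.FullName)
}
```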
diff --git a/server/events/github_app_working_dir.go b/server/events/github_app_working_dir.go
index 85435f8590..a06599efe0 100644
--- a/server/events/github_app_working_dir.go
+++ b/server/events/github_app_working_dir.go
@@ -5,6 +5,7 @@ import (
"github.com/runatlantis/atlantis/server/events/models"
"github.com/runatlantis/atlantis/server/events/vcs"
+ "github.com/runatlantis/atlantis/server/logging"
)
const redactedReplacement = "://:@"
@@ -19,7 +20,7 @@ type GithubAppWorkingDir struct {
}
// Clone writes a fresh token for Github App authentication
-func (g *GithubAppWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) {
+func (g *GithubAppWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) {
baseRepo := &p.BaseRepo
// Realistically, this is a super brittle way of supporting clones using gh app installation tokens
@@ -35,5 +36,5 @@ func (g *GithubAppWorkingDir) Clone(headRepo models.Repo, p models.PullRequest,
headRepo.CloneURL = strings.Replace(headRepo.CloneURL, "://:@", replacement, 1)
headRepo.SanitizedCloneURL = strings.Replace(baseRepo.SanitizedCloneURL, redactedReplacement, replacement, 1)
- return g.WorkingDir.Clone(headRepo, p, workspace)
+ return g.WorkingDir.Clone(logger, headRepo, p, workspace)
}
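The unshown `replacement` above is built from a freshly minted installation token; assuming GitHub's x-access-token convention for App credentials, the splice Clone performs reduces to:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Sanitized clone URLs keep an empty userinfo section ("://:@").
	sanitized := "https://:@github.com/runatlantis/atlantis.git"

	// Hypothetical token value; the real one comes from credentials.GetToken().
	token := "ghs_example"
	replacement := fmt.Sprintf("://x-access-token:%s@", token)

	fmt.Println(strings.Replace(sanitized, "://:@", replacement, 1))
	// https://x-access-token:ghs_example@github.com/runatlantis/atlantis.git
}
```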
diff --git a/server/events/github_app_working_dir_test.go b/server/events/github_app_working_dir_test.go
index 28983da870..78e64d4e0b 100644
--- a/server/events/github_app_working_dir_test.go
+++ b/server/events/github_app_working_dir_test.go
@@ -29,7 +29,6 @@ func TestClone_GithubAppNoneExisting(t *testing.T) {
DataDir: dataDir,
CheckoutMerge: false,
TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir),
- Logger: logger,
}
defer disableSSLVerification()()
@@ -46,7 +45,7 @@ func TestClone_GithubAppNoneExisting(t *testing.T) {
GithubHostname: testServer,
}
- cloneDir, _, err := gwd.Clone(models.Repo{}, models.PullRequest{
+ cloneDir, _, err := gwd.Clone(logger, models.Repo{}, models.PullRequest{
BaseRepo: models.Repo{},
HeadBranch: "branch",
}, "default")
@@ -58,6 +57,8 @@ func TestClone_GithubAppNoneExisting(t *testing.T) {
}
func TestClone_GithubAppSetsCorrectUrl(t *testing.T) {
+ logger := logging.NewNoopLogger(t)
+
RegisterMockTestingT(t)
workingDir := eventMocks.NewMockWorkingDir()
@@ -88,13 +89,12 @@ func TestClone_GithubAppSetsCorrectUrl(t *testing.T) {
modifiedBaseRepo.SanitizedCloneURL = "https://github.com/runatlantis/atlantis.git"
When(credentials.GetToken()).ThenReturn("token", nil)
- When(workingDir.Clone(modifiedBaseRepo, models.PullRequest{BaseRepo: modifiedBaseRepo}, "default")).ThenReturn(
- "", true, nil,
- )
+ When(workingDir.Clone(Any[logging.SimpleLogging](), Eq(modifiedBaseRepo), Eq(models.PullRequest{BaseRepo: modifiedBaseRepo}),
+ Eq("default"))).ThenReturn("", true, nil)
- _, success, _ := ghAppWorkingDir.Clone(headRepo, models.PullRequest{BaseRepo: baseRepo}, "default")
+ _, success, _ := ghAppWorkingDir.Clone(logger, headRepo, models.PullRequest{BaseRepo: baseRepo}, "default")
- workingDir.VerifyWasCalledOnce().Clone(modifiedBaseRepo, models.PullRequest{BaseRepo: modifiedBaseRepo}, "default")
+ workingDir.VerifyWasCalledOnce().Clone(logger, modifiedBaseRepo, models.PullRequest{BaseRepo: modifiedBaseRepo}, "default")
Assert(t, success == true, "clone url mutation error")
}
diff --git a/server/events/markdown_renderer.go b/server/events/markdown_renderer.go
index 74a72c6719..5bbfc8a47e 100644
--- a/server/events/markdown_renderer.go
+++ b/server/events/markdown_renderer.go
@@ -72,6 +72,7 @@ type commonData struct {
EnableDiffMarkdownFormat bool
ExecutableName string
HideUnchangedPlanComments bool
+ VcsRequestType string
}
// errData is data about an error response.
@@ -170,13 +171,20 @@ func NewMarkdownRenderer(
// Render formats the data into a markdown string.
// nolint: interfacer
-func (m *MarkdownRenderer) Render(res command.Result, cmdName command.Name, subCmd, log string, verbose bool, vcsHost models.VCSHostType) string {
- commandStr := cases.Title(language.English).String(strings.Replace(cmdName.String(), "_", " ", -1))
+func (m *MarkdownRenderer) Render(ctx *command.Context, res command.Result, cmd PullCommand) string {
+ commandStr := cases.Title(language.English).String(strings.Replace(cmd.CommandName().String(), "_", " ", -1))
+ var vcsRequestType string
+ if ctx.Pull.BaseRepo.VCSHost.Type == models.Gitlab {
+ vcsRequestType = "Merge Request"
+ } else {
+ vcsRequestType = "Pull Request"
+ }
+
common := commonData{
Command: commandStr,
- SubCommand: subCmd,
- Verbose: verbose,
- Log: log,
+ SubCommand: cmd.SubCommandName(),
+ Verbose: cmd.IsVerbose(),
+ Log: ctx.Log.GetHistory(),
PlansDeleted: res.PlansDeleted,
DisableApplyAll: m.disableApplyAll || m.disableApply,
DisableApply: m.disableApply,
@@ -184,6 +192,7 @@ func (m *MarkdownRenderer) Render(res command.Result, cmdName command.Name, subC
EnableDiffMarkdownFormat: m.enableDiffMarkdownFormat,
ExecutableName: m.executableName,
HideUnchangedPlanComments: m.hideUnchangedPlanComments,
+ VcsRequestType: vcsRequestType,
}
templates := m.markdownTemplates
@@ -194,10 +203,12 @@ func (m *MarkdownRenderer) Render(res command.Result, cmdName command.Name, subC
if res.Failure != "" {
return m.renderTemplateTrimSpace(templates.Lookup("failureWithLog"), failureData{res.Failure, "", common})
}
- return m.renderProjectResults(res.ProjectResults, common, vcsHost)
+ return m.renderProjectResults(ctx, res.ProjectResults, common)
}
-func (m *MarkdownRenderer) renderProjectResults(results []command.ProjectResult, common commonData, vcsHost models.VCSHostType) string {
+func (m *MarkdownRenderer) renderProjectResults(ctx *command.Context, results []command.ProjectResult, common commonData) string {
+ vcsHost := ctx.Pull.BaseRepo.VCSHost.Type
+
var resultsTmplData []projectResultTmplData
numPlanSuccesses := 0
numPolicyCheckSuccesses := 0
diff --git a/server/events/markdown_renderer_test.go b/server/events/markdown_renderer_test.go
index eebd1a8b87..ace23c443a 100644
--- a/server/events/markdown_renderer_test.go
+++ b/server/events/markdown_renderer_test.go
@@ -23,6 +23,7 @@ import (
"github.com/runatlantis/atlantis/server/events"
"github.com/runatlantis/atlantis/server/events/command"
"github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/logging"
. "github.com/runatlantis/atlantis/testing"
)
@@ -60,17 +61,36 @@ func TestRenderErr(t *testing.T) {
}
r := events.NewMarkdownRenderer(false, false, false, false, false, false, "", "atlantis", false)
+ logger := logging.NewNoopLogger(t).WithHistory()
+ logText := "log"
+ logger.Info(logText)
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: models.Github,
+ },
+ },
+ },
+ }
for _, c := range cases {
res := command.Result{
Error: c.Error,
}
for _, verbose := range []bool{true, false} {
t.Run(fmt.Sprintf("%s_%t", c.Description, verbose), func(t *testing.T) {
- s := r.Render(res, c.Command, "", "log", verbose, models.Github)
+ cmd := &events.CommentCommand{
+ Name: c.Command,
+ Verbose: verbose,
+ }
+ s := r.Render(ctx, res, cmd)
if !verbose {
Equals(t, normalize(c.Expected), normalize(s))
} else {
- Equals(t, normalize(c.Expected)+"\n\n<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>", normalize(s))
+ log := fmt.Sprintf("[INFO] %s", logText)
+ Equals(t, normalize(c.Expected+
+ fmt.Sprintf("\nLog \n\n\n```\n%s\n```\n
", log)), normalize(s))
}
})
}
@@ -88,34 +108,54 @@ func TestRenderFailure(t *testing.T) {
"apply failure",
command.Apply,
"failure",
- "**Apply Failed**: failure\n",
+ "**Apply Failed**: failure",
},
{
"plan failure",
command.Plan,
"failure",
- "**Plan Failed**: failure\n",
+ "**Plan Failed**: failure",
},
{
"policy check failure",
command.PolicyCheck,
"failure",
- "**Policy Check Failed**: failure\n",
+ "**Policy Check Failed**: failure",
},
}
r := events.NewMarkdownRenderer(false, false, false, false, false, false, "", "atlantis", false)
+ logger := logging.NewNoopLogger(t).WithHistory()
+ logText := "log"
+ logger.Info(logText)
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: models.Github,
+ },
+ },
+ },
+ }
+
for _, c := range cases {
res := command.Result{
Failure: c.Failure,
}
for _, verbose := range []bool{true, false} {
t.Run(fmt.Sprintf("%s_%t", c.Description, verbose), func(t *testing.T) {
- s := r.Render(res, c.Command, "", "log", verbose, models.Github)
+ cmd := &events.CommentCommand{
+ Name: c.Command,
+ Verbose: verbose,
+ }
+ s := r.Render(ctx, res, cmd)
if !verbose {
Equals(t, normalize(c.Expected), normalize(s))
} else {
- Equals(t, normalize(c.Expected+"\n<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>"), normalize(s))
+ log := fmt.Sprintf("[INFO] %s", logText)
+ Equals(t, normalize(c.Expected+
+ fmt.Sprintf("\nLog \n\n\n```\n%s\n```\n
", log)), normalize(s))
}
})
}
@@ -124,11 +164,27 @@ func TestRenderFailure(t *testing.T) {
func TestRenderErrAndFailure(t *testing.T) {
r := events.NewMarkdownRenderer(false, false, false, false, false, false, "", "atlantis", false)
+ logger := logging.NewNoopLogger(t).WithHistory()
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: models.Github,
+ },
+ },
+ },
+ }
res := command.Result{
Error: errors.New("error"),
Failure: "failure",
}
- s := r.Render(res, command.Plan, "", "", false, models.Github)
+ cmd := &events.CommentCommand{
+ Name: command.Plan,
+ Verbose: false,
+ }
+
+ s := r.Render(ctx, res, cmd)
Equals(t, "**Plan Error**\n```\nerror\n```", normalize(s))
}
@@ -147,7 +203,7 @@ func TestRenderProjectResults(t *testing.T) {
"",
[]command.ProjectResult{},
models.Github,
- "Ran Plan for 0 projects:\n\n\n",
+ "Ran Plan for 0 projects:\n\n",
},
{
"single successful plan",
@@ -166,23 +222,32 @@ func TestRenderProjectResults(t *testing.T) {
},
},
models.Github,
- `Ran Plan for dir: $path$ workspace: $workspace$
+ `
+Ran Plan for dir: $path$ workspace: $workspace$
$$$diff
terraform-output
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
`,
},
{
@@ -203,25 +268,33 @@ $$$
},
},
models.Github,
- `Ran Plan for dir: $path$ workspace: $workspace$
+ `
+Ran Plan for dir: $path$ workspace: $workspace$
$$$diff
terraform-output
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
-
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
:twisted_rightwards_arrows: Upstream was modified, a new merge was performed.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
`,
},
{
@@ -242,23 +315,32 @@ $$$
},
},
models.Github,
- `Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$
+ `
+Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$
$$$diff
terraform-output
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
`,
},
{
@@ -296,7 +378,8 @@ $$$
},
},
models.Github,
- `Ran Policy Check for project: $projectname$ dir: $path$ workspace: $workspace$
+ `
+Ran Policy Check for project: $projectname$ dir: $path$ workspace: $workspace$
#### Policy Set: $policy1$
$$$diff
@@ -317,16 +400,24 @@ policy set: policy1: requires: 1 approval(s), have: 0.
policy set: policy2: passed.
$$$
* :heavy_check_mark: To **approve** this project, comment:
- * $$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
`,
},
{
@@ -357,7 +448,8 @@ $$$
},
},
models.Github,
- `Ran Policy Check for project: $projectname$ dir: $path$ workspace: $workspace$
+ `
+Ran Policy Check for project: $projectname$ dir: $path$ workspace: $workspace$
<details><summary>Show Output</summary>
@@ -382,26 +474,33 @@ FAIL - - main - WARNING: Null Resource creation is prohibit
$$$
+
#### Policy Approval Status:
$$$
policy set: policy1: requires: 1 approval(s), have: 0.
$$$
* :heavy_check_mark: To **approve** this project, comment:
- * $$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * $atlantis plan -d path -w workspace$
-
-
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
$$$
policy set: policy1: 2 tests, 1 passed, 0 warnings, 1 failure, 0 exceptions
$$$
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
`,
},
{
@@ -420,7 +519,8 @@ $$$
},
},
models.Github,
- `Ran Import for project: $projectname$ dir: $path$ workspace: $workspace$
+ `
+Ran Import for project: $projectname$ dir: $path$ workspace: $workspace$
$$$diff
import-output
@@ -429,7 +529,9 @@ $$$
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
`,
},
{
@@ -448,7 +550,8 @@ $$$
},
},
models.Github,
- `Ran State $rm$ for project: $projectname$ dir: $path$ workspace: $workspace$
+ `
+Ran State $rm$ for project: $projectname$ dir: $path$ workspace: $workspace$
$$$diff
state-rm-output
@@ -457,7 +560,9 @@ $$$
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
`,
},
{
@@ -472,7 +577,8 @@ $$$
},
},
models.Github,
- `Ran Apply for dir: $path$ workspace: $workspace$
+ `
+Ran Apply for dir: $path$ workspace: $workspace$
$$$diff
success
@@ -492,7 +598,8 @@ $$$
},
},
models.Github,
- `Ran Apply for project: $projectname$ dir: $path$ workspace: $workspace$
+ `
+Ran Apply for project: $projectname$ dir: $path$ workspace: $workspace$
$$$diff
success
@@ -527,10 +634,12 @@ $$$
},
},
models.Github,
- `Ran Plan for 2 projects:
+ `
+Ran Plan for 2 projects:
1. dir: $path$ workspace: $workspace$
1. project: $projectname$ dir: $path2$ workspace: $workspace$
+---
### 1. dir: $path$ workspace: $workspace$
$$$diff
@@ -538,10 +647,14 @@ terraform-output
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
---
### 2. project: $projectname$ dir: $path2$ workspace: $workspace$
@@ -550,20 +663,28 @@ terraform-output2
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path2 -w workspace$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url2)
+ $$$shell
+ atlantis apply -d path2 -w workspace
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url2)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path2 -w workspace$
+ $$$shell
+ atlantis plan -d path2 -w workspace
+ $$$
---
### Plan Summary
2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
`,
},
{
@@ -605,10 +726,12 @@ $$$
},
},
models.Github,
- `Ran Policy Check for 2 projects:
+ `
+Ran Policy Check for 2 projects:
1. dir: $path$ workspace: $workspace$
1. project: $projectname$ dir: $path2$ workspace: $workspace$
+---
### 1. dir: $path$ workspace: $workspace$
#### Policy Set: $policy1$
@@ -618,10 +741,14 @@ $$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
---
### 2. project: $projectname$ dir: $path2$ workspace: $workspace$
@@ -632,16 +759,24 @@ $$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path2 -w workspace$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url2)
+ $$$shell
+ atlantis apply -d path2 -w workspace
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url2)
* :repeat: To re-run policies **plan** this project again by commenting:
- * $atlantis plan -d path2 -w workspace$
+ $$$shell
+ atlantis plan -d path2 -w workspace
+ $$$
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
`,
},
{
@@ -662,10 +797,12 @@ $$$
},
},
models.Github,
- `Ran Apply for 2 projects:
+ `
+Ran Apply for 2 projects:
1. project: $projectname$ dir: $path$ workspace: $workspace$
1. dir: $path2$ workspace: $workspace$
+---
### 1. project: $projectname$ dir: $path$ workspace: $workspace$
$$$diff
@@ -696,7 +833,8 @@ $$$
},
},
models.Github,
- `Ran Plan for dir: $path$ workspace: $workspace$
+ `
+Ran Plan for dir: $path$ workspace: $workspace$
**Plan Error**
$$$
@@ -716,7 +854,8 @@ $$$
},
},
models.Github,
- `Ran Plan for dir: $path$ workspace: $workspace$
+ `
+Ran Plan for dir: $path$ workspace: $workspace$
**Plan Failed**: failure
`,
@@ -749,11 +888,13 @@ $$$
},
},
models.Github,
- `Ran Plan for 3 projects:
+ `
+Ran Plan for 3 projects:
1. dir: $path$ workspace: $workspace$
1. dir: $path2$ workspace: $workspace$
1. project: $projectname$ dir: $path3$ workspace: $workspace$
+---
### 1. dir: $path$ workspace: $workspace$
$$$diff
@@ -761,10 +902,14 @@ terraform-output
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
---
### 2. dir: $path2$ workspace: $workspace$
@@ -782,10 +927,14 @@ $$$
3 projects, 1 with changes, 0 with no changes, 2 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
`,
},
{
@@ -833,11 +982,13 @@ $$$
},
},
models.Github,
- `Ran Policy Check for 3 projects:
+ `
+Ran Policy Check for 3 projects:
1. dir: $path$ workspace: $workspace$
1. dir: $path2$ workspace: $workspace$
1. project: $projectname$ dir: $path3$ workspace: $workspace$
+---
### 1. dir: $path$ workspace: $workspace$
#### Policy Set: $policy1$
@@ -847,10 +998,14 @@ $$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
---
### 2. dir: $path2$ workspace: $workspace$
@@ -866,10 +1021,14 @@ $$$
policy set: policy1: requires: 1 approval(s), have: 0.
$$$
* :heavy_check_mark: To **approve** this project, comment:
- * $$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
---
### 3. project: $projectname$ dir: $path3$ workspace: $workspace$
@@ -879,12 +1038,18 @@ error
$$$
---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * $atlantis approve_policies$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis approve_policies
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
* :repeat: To re-run policies **plan** this project again by commenting:
- * $atlantis plan$
+ $$$shell
+ atlantis plan
+ $$$
`,
},
{
@@ -909,11 +1074,13 @@ $$$
},
},
models.Github,
- `Ran Apply for 3 projects:
+ `
+Ran Apply for 3 projects:
1. dir: $path$ workspace: $workspace$
1. dir: $path2$ workspace: $workspace$
1. dir: $path3$ workspace: $workspace$
+---
### 1. dir: $path$ workspace: $workspace$
$$$diff
@@ -959,11 +1126,13 @@ $$$
},
},
models.Github,
- `Ran Apply for 3 projects:
+ `
+Ran Apply for 3 projects:
1. dir: $path$ workspace: $workspace$
1. dir: $path2$ workspace: $workspace$
1. dir: $path3$ workspace: $workspace$
+---
### 1. dir: $path$ workspace: $workspace$
$$$diff
@@ -990,6 +1159,19 @@ $$$
}
r := events.NewMarkdownRenderer(false, false, false, false, false, false, "", "atlantis", false)
+ logger := logging.NewNoopLogger(t).WithHistory()
+ logText := "log"
+ logger.Info(logText)
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: models.Github,
+ },
+ },
+ },
+ }
for _, c := range cases {
t.Run(c.Description, func(t *testing.T) {
res := command.Result{
@@ -997,11 +1179,18 @@ $$$
}
for _, verbose := range []bool{true, false} {
t.Run(c.Description, func(t *testing.T) {
- s := r.Render(res, c.Command, c.SubCommand, "log", verbose, c.VCSHost)
+ cmd := &events.CommentCommand{
+ Name: c.Command,
+ SubName: c.SubCommand,
+ Verbose: verbose,
+ }
+ s := r.Render(ctx, res, cmd)
if !verbose {
Equals(t, normalize(c.Expected), normalize(s))
} else {
- Equals(t, normalize(c.Expected+"\n<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>"), normalize(s))
+ log := fmt.Sprintf("[INFO] %s", logText)
+ Equals(t, normalize(c.Expected+
+ fmt.Sprintf("<details><summary>Log</summary>\n\n```\n%s\n```\n</details>", log)), normalize(s))
}
})
}
@@ -1034,17 +1223,22 @@ func TestRenderProjectResultsDisableApplyAll(t *testing.T) {
},
},
models.Github,
- `Ran Plan for dir: $path$ workspace: $workspace$
+ `
+Ran Plan for dir: $path$ workspace: $workspace$
$$$diff
terraform-output
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
`,
},
{
@@ -1064,17 +1258,22 @@ $$$
},
},
models.Github,
- `Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$
+ `
+Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$
$$$diff
terraform-output
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
`,
},
{
@@ -1104,10 +1303,12 @@ $$$
},
},
models.Github,
- `Ran Plan for 2 projects:
+ `
+Ran Plan for 2 projects:
1. dir: $path$ workspace: $workspace$
1. project: $projectname$ dir: $path2$ workspace: $workspace$
+---
### 1. dir: $path$ workspace: $workspace$
$$$diff
@@ -1115,10 +1316,14 @@ terraform-output
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
---
### 2. project: $projectname$ dir: $path2$ workspace: $workspace$
@@ -1127,10 +1332,14 @@ terraform-output2
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path2 -w workspace$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url2)
+ $$$shell
+ atlantis apply -d path2 -w workspace
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url2)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path2 -w workspace$
+ $$$shell
+ atlantis plan -d path2 -w workspace
+ $$$
---
### Plan Summary
@@ -1150,6 +1359,19 @@ $$$
"atlantis", // executableName
false, // hideUnchangedPlanComments
)
+ logger := logging.NewNoopLogger(t).WithHistory()
+ logText := "log"
+ logger.Info(logText)
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: models.Github,
+ },
+ },
+ },
+ }
for _, c := range cases {
t.Run(c.Description, func(t *testing.T) {
res := command.Result{
@@ -1157,11 +1379,17 @@ $$$
}
for _, verbose := range []bool{true, false} {
t.Run(c.Description, func(t *testing.T) {
- s := r.Render(res, c.Command, "", "log", verbose, c.VCSHost)
+ cmd := &events.CommentCommand{
+ Name: c.Command,
+ Verbose: verbose,
+ }
+ s := r.Render(ctx, res, cmd)
if !verbose {
Equals(t, normalize(c.Expected), normalize(s))
} else {
- Equals(t, normalize(c.Expected+"\n<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>"), normalize(s))
+ log := fmt.Sprintf("[INFO] %s", logText)
+ Equals(t, normalize(c.Expected)+
+ fmt.Sprintf("\n<details><summary>Log</summary>\n\n```\n%s\n```\n</details>", log), normalize(s))
}
})
}
@@ -1169,7 +1397,7 @@ $$$
}
}
-// Test that if disable apply is set then the apply footer is not added
+// Test that if disable apply is set then the apply footer is not added
func TestRenderProjectResultsDisableApply(t *testing.T) {
cases := []struct {
Description string
@@ -1194,15 +1422,18 @@ func TestRenderProjectResultsDisableApply(t *testing.T) {
},
},
models.Github,
- `Ran Plan for dir: $path$ workspace: $workspace$
+ `
+Ran Plan for dir: $path$ workspace: $workspace$
$$$diff
terraform-output
$$$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
`,
},
{
@@ -1222,15 +1453,18 @@ $$$
},
},
models.Github,
- `Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$
+ `
+Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$
$$$diff
terraform-output
$$$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
`,
},
{
@@ -1260,19 +1494,23 @@ $$$
},
},
models.Github,
- `Ran Plan for 2 projects:
+ `
+Ran Plan for 2 projects:
1. dir: $path$ workspace: $workspace$
1. project: $projectname$ dir: $path2$ workspace: $workspace$
+---
### 1. dir: $path$ workspace: $workspace$
$$$diff
terraform-output
$$$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
---
### 2. project: $projectname$ dir: $path2$ workspace: $workspace$
@@ -1280,9 +1518,11 @@ $$$diff
terraform-output2
$$$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url2)
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url2)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path2 -w workspace$
+ $$$shell
+ atlantis plan -d path2 -w workspace
+ $$$
---
### Plan Summary
@@ -1303,6 +1543,19 @@ $$$
"atlantis", // executableName
false, // hideUnchangedPlanComments
)
+ logger := logging.NewNoopLogger(t).WithHistory()
+ logText := "log"
+ logger.Info(logText)
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: models.Github,
+ },
+ },
+ },
+ }
for _, c := range cases {
t.Run(c.Description, func(t *testing.T) {
res := command.Result{
@@ -1310,11 +1563,17 @@ $$$
}
for _, verbose := range []bool{true, false} {
t.Run(c.Description, func(t *testing.T) {
- s := r.Render(res, c.Command, "", "log", verbose, c.VCSHost)
+ cmd := &events.CommentCommand{
+ Name: c.Command,
+ Verbose: verbose,
+ }
+ s := r.Render(ctx, res, cmd)
if !verbose {
Equals(t, normalize(c.Expected), normalize(s))
} else {
- Equals(t, normalize(c.Expected+"\n<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>"), normalize(s))
+ log := fmt.Sprintf("[INFO] %s", logText)
+ Equals(t, normalize(c.Expected)+
+ fmt.Sprintf("\n<details><summary>Log</summary>\n\n```\n%s\n```\n</details>", log), normalize(s))
}
})
}
@@ -1342,8 +1601,21 @@ func TestRenderCustomPolicyCheckTemplate_DisableApplyAll(t *testing.T) {
"atlantis", // executableName
false, // hideUnchangedPlanComments
)
+ logger := logging.NewNoopLogger(t).WithHistory()
+ logText := "log"
+ logger.Info(logText)
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: models.Github,
+ },
+ },
+ },
+ }
- rendered := r.Render(command.Result{
+ res := command.Result{
ProjectResults: []command.ProjectResult{
{
Workspace: "workspace",
@@ -1361,8 +1633,14 @@ func TestRenderCustomPolicyCheckTemplate_DisableApplyAll(t *testing.T) {
},
},
},
- }, command.PolicyCheck, "", "log", false, models.Github)
- exp = `Ran Policy Check for dir: $path$ workspace: $workspace$
+ }
+ cmd := &events.CommentCommand{
+ Name: command.PolicyCheck,
+ Verbose: false,
+ }
+ rendered := r.Render(ctx, res, cmd)
+ exp = `
+Ran Policy Check for dir: $path$ workspace: $workspace$
#### Policy Set: $policy1$
$$$diff
@@ -1371,10 +1649,15 @@ $$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To re-run policies **plan** this project again by commenting:
- * $atlantis plan -d path -w workspace$`
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
+`
Equals(t, normalize(exp), normalize(rendered))
}
@@ -1392,8 +1675,20 @@ func TestRenderProjectResults_DisableFolding(t *testing.T) {
"atlantis", // executableName
false, // hideUnchangedPlanComments
)
-
- rendered := mr.Render(command.Result{
+ logger := logging.NewNoopLogger(t).WithHistory()
+ logText := "log"
+ logger.Info(logText)
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: models.Github,
+ },
+ },
+ },
+ }
+ res := command.Result{
ProjectResults: []command.ProjectResult{
{
RepoRelDir: ".",
@@ -1401,7 +1696,12 @@ func TestRenderProjectResults_DisableFolding(t *testing.T) {
Error: errors.New(strings.Repeat("line\n", 13)),
},
},
- }, command.Plan, "", "log", false, models.Github)
+ }
+ cmd := &events.CommentCommand{
+ Name: command.Plan,
+ Verbose: false,
+ }
+ rendered := mr.Render(ctx, res, cmd)
Equals(t, false, strings.Contains(rendered, "\n<details>"))
}
@@ -1484,8 +1784,20 @@ func TestRenderProjectResults_WrappedErr(t *testing.T) {
"atlantis", // executableName
false, // hideUnchangedPlanComments
)
-
- rendered := mr.Render(command.Result{
+ logger := logging.NewNoopLogger(t).WithHistory()
+ logText := "log"
+ logger.Info(logText)
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: c.VCSHost,
+ },
+ },
+ },
+ }
+ res := command.Result{
ProjectResults: []command.ProjectResult{
{
RepoRelDir: ".",
@@ -1493,10 +1805,16 @@ func TestRenderProjectResults_WrappedErr(t *testing.T) {
Error: errors.New(c.Output),
},
},
- }, command.Plan, "", "log", false, c.VCSHost)
+ }
+ cmd := &events.CommentCommand{
+ Name: command.Plan,
+ Verbose: false,
+ }
+ rendered := mr.Render(ctx, res, cmd)
var exp string
if c.ShouldWrap {
- exp = `Ran Plan for dir: $.$ workspace: $default$
+ exp = `
+Ran Plan for dir: $.$ workspace: $default$
**Plan Error**
<details><summary>Show Output</summary>
@@ -1504,14 +1822,16 @@ func TestRenderProjectResults_WrappedErr(t *testing.T) {
$$$
` + c.Output + `
$$$
- `
+
+`
} else {
exp = `Ran Plan for dir: $.$ workspace: $default$
**Plan Error**
$$$
` + c.Output + `
-$$$`
+$$$
+`
}
Equals(t, normalize(exp), normalize(rendered))
})
@@ -1523,69 +1843,80 @@ $$$`
func TestRenderProjectResults_WrapSingleProject(t *testing.T) {
cases := []struct {
VCSHost models.VCSHostType
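+ // VcsRequestType is the request noun expected in the rendered footers: "Pull Request" or "Merge Request".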
+ VcsRequestType string
GitlabCommonMarkSupport bool
Output string
ShouldWrap bool
}{
{
- VCSHost: models.Github,
- Output: strings.Repeat("line\n", 1),
- ShouldWrap: false,
+ VCSHost: models.Github,
+ VcsRequestType: "Pull Request",
+ Output: strings.Repeat("line\n", 1),
+ ShouldWrap: false,
},
{
- VCSHost: models.Github,
- Output: strings.Repeat("line\n", 13) + "No changes. Infrastructure is up-to-date.",
- ShouldWrap: true,
+ VCSHost: models.Github,
+ VcsRequestType: "Pull Request",
+ Output: strings.Repeat("line\n", 13) + "No changes. Infrastructure is up-to-date.",
+ ShouldWrap: true,
},
{
VCSHost: models.Gitlab,
+ VcsRequestType: "Merge Request",
GitlabCommonMarkSupport: false,
Output: strings.Repeat("line\n", 1),
ShouldWrap: false,
},
{
VCSHost: models.Gitlab,
+ VcsRequestType: "Merge Request",
GitlabCommonMarkSupport: false,
Output: strings.Repeat("line\n", 13),
ShouldWrap: false,
},
{
VCSHost: models.Gitlab,
+ VcsRequestType: "Merge Request",
GitlabCommonMarkSupport: true,
Output: strings.Repeat("line\n", 1),
ShouldWrap: false,
},
{
VCSHost: models.Gitlab,
+ VcsRequestType: "Merge Request",
GitlabCommonMarkSupport: true,
Output: strings.Repeat("line\n", 13) + "No changes. Infrastructure is up-to-date.",
ShouldWrap: true,
},
{
- VCSHost: models.BitbucketCloud,
- Output: strings.Repeat("line\n", 1),
- ShouldWrap: false,
+ VCSHost: models.BitbucketCloud,
+ VcsRequestType: "Pull Request",
+ Output: strings.Repeat("line\n", 1),
+ ShouldWrap: false,
},
{
- VCSHost: models.BitbucketCloud,
- Output: strings.Repeat("line\n", 13),
- ShouldWrap: false,
+ VCSHost: models.BitbucketCloud,
+ VcsRequestType: "Pull Request",
+ Output: strings.Repeat("line\n", 13),
+ ShouldWrap: false,
},
{
- VCSHost: models.BitbucketServer,
- Output: strings.Repeat("line\n", 1),
- ShouldWrap: false,
+ VCSHost: models.BitbucketServer,
+ VcsRequestType: "Pull Request",
+ Output: strings.Repeat("line\n", 1),
+ ShouldWrap: false,
},
{
- VCSHost: models.BitbucketServer,
- Output: strings.Repeat("line\n", 13),
- ShouldWrap: false,
+ VCSHost: models.BitbucketServer,
+ VcsRequestType: "Pull Request",
+ Output: strings.Repeat("line\n", 13),
+ ShouldWrap: false,
},
}
for _, c := range cases {
- for _, cmd := range []command.Name{command.Plan, command.Apply} {
- t.Run(fmt.Sprintf("%s_%s_%v", c.VCSHost.String(), cmd.String(), c.ShouldWrap),
+ for _, cmdName := range []command.Name{command.Plan, command.Apply} {
+ t.Run(fmt.Sprintf("%s_%s_%v", c.VCSHost.String(), cmdName.String(), c.ShouldWrap),
func(t *testing.T) {
mr := events.NewMarkdownRenderer(
c.GitlabCommonMarkSupport, // gitlabSupportsCommonMark
@@ -1598,8 +1929,22 @@ func TestRenderProjectResults_WrapSingleProject(t *testing.T) {
"atlantis", // executableName
false, // hideUnchangedPlanComments
)
+ logger := logging.NewNoopLogger(t).WithHistory()
+ logText := "log"
+ logger.Info(logText)
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: c.VCSHost,
+ },
+ },
+ },
+ }
+
var pr command.ProjectResult
- switch cmd {
+ switch cmdName {
case command.Plan:
pr = command.ProjectResult{
RepoRelDir: ".",
@@ -1618,58 +1963,84 @@ func TestRenderProjectResults_WrapSingleProject(t *testing.T) {
ApplySuccess: c.Output,
}
}
- rendered := mr.Render(command.Result{
+ res := command.Result{
ProjectResults: []command.ProjectResult{pr},
- }, cmd, "", "log", false, c.VCSHost)
+ }
+ cmd := &events.CommentCommand{
+ Name: cmdName,
+ Verbose: false,
+ }
+ rendered := mr.Render(ctx, res, cmd)
// Check result.
var exp string
- switch cmd {
+ switch cmdName {
case command.Plan:
if c.ShouldWrap {
- exp = `Ran Plan for dir: $.$ workspace: $default$
+ exp = `
+Ran Plan for dir: $.$ workspace: $default$
<details><summary>Show Output</summary>
$$$diff
` + strings.TrimSpace(c.Output) + `
$$$
+
* :arrow_forward: To **apply** this plan, comment:
- * $applycmd$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+ applycmd
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * $replancmd$
-
+ $$$shell
+ replancmd
+ $$$
No changes. Infrastructure is up-to-date.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$`
+* :fast_forward: To **apply** all unapplied plans from this ` + c.VcsRequestType + `, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this ` + c.VcsRequestType + `, comment:
+ $$$shell
+ atlantis unlock
+ $$$
+`
} else {
- exp = `Ran Plan for dir: $.$ workspace: $default$
+ exp = `
+Ran Plan for dir: $.$ workspace: $default$
$$$diff
` + strings.TrimSpace(c.Output) + `
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $applycmd$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+ applycmd
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * $replancmd$
+ $$$shell
+ replancmd
+ $$$
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$`
+* :fast_forward: To **apply** all unapplied plans from this ` + c.VcsRequestType + `, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this ` + c.VcsRequestType + `, comment:
+ $$$shell
+ atlantis unlock
+ $$$
+`
}
case command.Apply:
if c.ShouldWrap {
- exp = `Ran Apply for dir: $.$ workspace: $default$
+ exp = `
+Ran Apply for dir: $.$ workspace: $default$
<details><summary>Show Output</summary>
@@ -1677,13 +2048,16 @@ $$$diff
` + strings.TrimSpace(c.Output) + `
$$$
- `
+
+`
} else {
- exp = `Ran Apply for dir: $.$ workspace: $default$
+ exp = `
+Ran Apply for dir: $.$ workspace: $default$
$$$diff
` + strings.TrimSpace(c.Output) + `
-$$$`
+$$$
+`
}
}
@@ -1705,8 +2079,21 @@ func TestRenderProjectResults_MultiProjectApplyWrapped(t *testing.T) {
"atlantis", // executableName
false, // hideUnchangedPlanComments
)
+ logger := logging.NewNoopLogger(t).WithHistory()
+ logText := "log"
+ logger.Info(logText)
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: models.Github,
+ },
+ },
+ },
+ }
tfOut := strings.Repeat("line\n", 13)
- rendered := mr.Render(command.Result{
+ res := command.Result{
ProjectResults: []command.ProjectResult{
{
RepoRelDir: ".",
@@ -1719,11 +2106,18 @@ func TestRenderProjectResults_MultiProjectApplyWrapped(t *testing.T) {
ApplySuccess: tfOut,
},
},
- }, command.Apply, "", "log", false, models.Github)
- exp := `Ran Apply for 2 projects:
+ }
+ cmd := &events.CommentCommand{
+ Name: command.Apply,
+ Verbose: false,
+ }
+ rendered := mr.Render(ctx, res, cmd)
+ exp := `
+Ran Apply for 2 projects:
1. dir: $.$ workspace: $staging$
1. dir: $.$ workspace: $production$
+---
### 1. dir: $.$ workspace: $staging$
<details><summary>Show Output</summary>
@@ -1764,8 +2158,21 @@ func TestRenderProjectResults_MultiProjectPlanWrapped(t *testing.T) {
"atlantis", // executableName
false, // hideUnchangedPlanComments
)
+ logger := logging.NewNoopLogger(t).WithHistory()
+ logText := "log"
+ logger.Info(logText)
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: models.Github,
+ },
+ },
+ },
+ }
tfOut := strings.Repeat("line\n", 13) + "Plan: 1 to add, 0 to change, 0 to destroy."
- rendered := mr.Render(command.Result{
+ res := command.Result{
ProjectResults: []command.ProjectResult{
{
RepoRelDir: ".",
@@ -1788,11 +2195,18 @@ func TestRenderProjectResults_MultiProjectPlanWrapped(t *testing.T) {
},
},
},
- }, command.Plan, "", "log", false, models.Github)
- exp := `Ran Plan for 2 projects:
+ }
+ cmd := &events.CommentCommand{
+ Name: command.Plan,
+ Verbose: false,
+ }
+ rendered := mr.Render(ctx, res, cmd)
+ exp := `
+Ran Plan for 2 projects:
1. dir: $.$ workspace: $staging$
1. dir: $.$ workspace: $production$
+---
### 1. dir: $.$ workspace: $staging$
<details><summary>Show Output</summary>
@@ -1800,13 +2214,17 @@ func TestRenderProjectResults_MultiProjectPlanWrapped(t *testing.T) {
$$$diff
` + tfOut + `
$$$
+
* :arrow_forward: To **apply** this plan, comment:
- * $staging-apply-cmd$
-* :put_litter_in_its_place: To **delete** this plan click [here](staging-lock-url)
+ $$$shell
+ staging-apply-cmd
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](staging-lock-url)
* :repeat: To **plan** this project again, comment:
- * $staging-replan-cmd$
-
+ $$$shell
+ staging-replan-cmd
+ $$$
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -1816,13 +2234,17 @@ Plan: 1 to add, 0 to change, 0 to destroy.
$$$diff
` + tfOut + `
$$$
+
* :arrow_forward: To **apply** this plan, comment:
- * $production-apply-cmd$
-* :put_litter_in_its_place: To **delete** this plan click [here](production-lock-url)
+ $$$shell
+ production-apply-cmd
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](production-lock-url)
* :repeat: To **plan** this project again, comment:
- * $production-replan-cmd$
-
+ $$$shell
+ production-replan-cmd
+ $$$
Plan: 1 to add, 0 to change, 0 to destroy.
---
@@ -1830,10 +2252,14 @@ Plan: 1 to add, 0 to change, 0 to destroy.
2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
`
Equals(t, normalize(exp), normalize(rendered))
}
@@ -1842,11 +2268,11 @@ Plan: 1 to add, 0 to change, 0 to destroy.
// all the plans as a result.
func TestRenderProjectResults_PlansDeleted(t *testing.T) {
cases := map[string]struct {
- cr command.Result
+ res command.Result
exp string
}{
"one failure": {
- cr: command.Result{
+ res: command.Result{
ProjectResults: []command.ProjectResult{
{
RepoRelDir: ".",
@@ -1856,12 +2282,14 @@ func TestRenderProjectResults_PlansDeleted(t *testing.T) {
},
PlansDeleted: true,
},
- exp: `Ran Plan for dir: $.$ workspace: $staging$
+ exp: `
+Ran Plan for dir: $.$ workspace: $staging$
-**Plan Failed**: failure`,
+**Plan Failed**: failure
+`,
},
"two failures": {
- cr: command.Result{
+ res: command.Result{
ProjectResults: []command.ProjectResult{
{
RepoRelDir: ".",
@@ -1876,10 +2304,12 @@ func TestRenderProjectResults_PlansDeleted(t *testing.T) {
},
PlansDeleted: true,
},
- exp: `Ran Plan for 2 projects:
+ exp: `
+Ran Plan for 2 projects:
1. dir: $.$ workspace: $staging$
1. dir: $.$ workspace: $production$
+---
### 1. dir: $.$ workspace: $staging$
**Plan Failed**: failure
@@ -1895,7 +2325,7 @@ func TestRenderProjectResults_PlansDeleted(t *testing.T) {
`,
},
"one failure, one success": {
- cr: command.Result{
+ res: command.Result{
ProjectResults: []command.ProjectResult{
{
RepoRelDir: ".",
@@ -1915,10 +2345,12 @@ func TestRenderProjectResults_PlansDeleted(t *testing.T) {
},
PlansDeleted: true,
},
- exp: `Ran Plan for 2 projects:
+ exp: `
+Ran Plan for 2 projects:
1. dir: $.$ workspace: $staging$
1. dir: $.$ workspace: $production$
+---
### 1. dir: $.$ workspace: $staging$
**Plan Failed**: failure
@@ -1952,7 +2384,24 @@ This plan was not saved because one or more projects failed and automerge requir
"atlantis", // executableName
false, // hideUnchangedPlanComments
)
- rendered := mr.Render(c.cr, command.Plan, "", "log", false, models.Github)
+ logger := logging.NewNoopLogger(t).WithHistory()
+ logText := "log"
+ logger.Info(logText)
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: models.Github,
+ },
+ },
+ },
+ }
+ cmd := &events.CommentCommand{
+ Name: command.Plan,
+ Verbose: false,
+ }
+ rendered := mr.Render(ctx, c.res, cmd)
Equals(t, normalize(c.exp), normalize(rendered))
})
}
@@ -1972,7 +2421,7 @@ func TestRenderProjectResultsWithRepoLockingDisabled(t *testing.T) {
command.Plan,
[]command.ProjectResult{},
models.Github,
- "Ran Plan for 0 projects:\n\n\n",
+ "Ran Plan for 0 projects:\n\n",
},
{
"single successful plan",
@@ -1990,22 +2439,31 @@ func TestRenderProjectResultsWithRepoLockingDisabled(t *testing.T) {
},
},
models.Github,
- `Ran Plan for dir: $path$ workspace: $workspace$
+ `
+Ran Plan for dir: $path$ workspace: $workspace$
$$$diff
terraform-output
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
`,
},
{
@@ -2025,24 +2483,32 @@ $$$
},
},
models.Github,
- `Ran Plan for dir: $path$ workspace: $workspace$
+ `
+Ran Plan for dir: $path$ workspace: $workspace$
$$$diff
terraform-output
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
-
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
:twisted_rightwards_arrows: Upstream was modified, a new merge was performed.
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
`,
},
{
@@ -2062,22 +2528,31 @@ $$$
},
},
models.Github,
- `Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$
+ `
+Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$
$$$diff
terraform-output
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
`,
},
{
@@ -2091,7 +2566,8 @@ $$$
},
},
models.Github,
- `Ran Apply for dir: $path$ workspace: $workspace$
+ `
+Ran Apply for dir: $path$ workspace: $workspace$
$$$diff
success
@@ -2110,7 +2586,8 @@ $$$
},
},
models.Github,
- `Ran Apply for project: $projectname$ dir: $path$ workspace: $workspace$
+ `
+Ran Apply for project: $projectname$ dir: $path$ workspace: $workspace$
$$$diff
success
@@ -2144,10 +2621,12 @@ $$$
},
},
models.Github,
- `Ran Plan for 2 projects:
+ `
+Ran Plan for 2 projects:
1. dir: $path$ workspace: $workspace$
1. project: $projectname$ dir: $path2$ workspace: $workspace$
+---
### 1. dir: $path$ workspace: $workspace$
$$$diff
@@ -2155,9 +2634,13 @@ terraform-output
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
---
### 2. project: $projectname$ dir: $path2$ workspace: $workspace$
@@ -2166,19 +2649,27 @@ terraform-output2
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path2 -w workspace$
+ $$$shell
+ atlantis apply -d path2 -w workspace
+ $$$
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path2 -w workspace$
+ $$$shell
+ atlantis plan -d path2 -w workspace
+ $$$
---
### Plan Summary
2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
`,
},
{
@@ -2198,10 +2689,12 @@ $$$
},
},
models.Github,
- `Ran Apply for 2 projects:
+ `
+Ran Apply for 2 projects:
1. project: $projectname$ dir: $path$ workspace: $workspace$
1. dir: $path2$ workspace: $workspace$
+---
### 1. project: $projectname$ dir: $path$ workspace: $workspace$
$$$diff
@@ -2231,7 +2724,8 @@ $$$
},
},
models.Github,
- `Ran Plan for dir: $path$ workspace: $workspace$
+ `
+Ran Plan for dir: $path$ workspace: $workspace$
**Plan Error**
$$$
@@ -2250,7 +2744,8 @@ $$$
},
},
models.Github,
- `Ran Plan for dir: $path$ workspace: $workspace$
+ `
+Ran Plan for dir: $path$ workspace: $workspace$
**Plan Failed**: failure
`,
@@ -2282,11 +2777,13 @@ $$$
},
},
models.Github,
- `Ran Plan for 3 projects:
+ `
+Ran Plan for 3 projects:
1. dir: $path$ workspace: $workspace$
1. dir: $path2$ workspace: $workspace$
1. project: $projectname$ dir: $path3$ workspace: $workspace$
+---
### 1. dir: $path$ workspace: $workspace$
$$$diff
@@ -2294,9 +2791,13 @@ terraform-output
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
---
### 2. dir: $path2$ workspace: $workspace$
@@ -2314,10 +2815,14 @@ $$$
3 projects, 1 with changes, 0 with no changes, 2 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
`,
},
{
@@ -2341,11 +2846,13 @@ $$$
},
},
models.Github,
- `Ran Apply for 3 projects:
+ `
+Ran Apply for 3 projects:
1. dir: $path$ workspace: $workspace$
1. dir: $path2$ workspace: $workspace$
1. dir: $path3$ workspace: $workspace$
+---
### 1. dir: $path$ workspace: $workspace$
$$$diff
@@ -2390,11 +2897,13 @@ $$$
},
},
models.Github,
- `Ran Apply for 3 projects:
+ `
+Ran Apply for 3 projects:
1. dir: $path$ workspace: $workspace$
1. dir: $path2$ workspace: $workspace$
1. dir: $path3$ workspace: $workspace$
+---
### 1. dir: $path$ workspace: $workspace$
$$$diff
@@ -2431,6 +2940,19 @@ $$$
"atlantis", // executableName
false, // hideUnchangedPlanComments
)
+ logger := logging.NewNoopLogger(t).WithHistory()
+ logText := "log"
+ logger.Info(logText)
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: models.Github,
+ },
+ },
+ },
+ }
for _, c := range cases {
t.Run(c.Description, func(t *testing.T) {
res := command.Result{
@@ -2438,11 +2960,17 @@ $$$
}
for _, verbose := range []bool{true, false} {
t.Run(c.Description, func(t *testing.T) {
- s := r.Render(res, c.Command, "", "log", verbose, c.VCSHost)
+ cmd := &events.CommentCommand{
+ Name: c.Command,
+ Verbose: verbose,
+ }
+ s := r.Render(ctx, res, cmd)
if !verbose {
Equals(t, normalize(c.Expected), normalize(s))
} else {
- Equals(t, normalize(c.Expected+"\n<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>"), normalize(s))
+ log := fmt.Sprintf("[INFO] %s", logText)
+ Equals(t, normalize(c.Expected+
+ fmt.Sprintf("<details><summary>Log</summary>\n\n```\n%s\n```\n</details>", log)), normalize(s))
}
})
}
@@ -2450,7 +2978,145 @@ $$$
}
}
-const tfOutput = `An execution plan has been generated and is shown below.
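+// TestRenderProjectResultsWithGitLab pins the GitLab wording: the apply/unlock footers must say "Merge Request" rather than "Pull Request".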
+func TestRenderProjectResultsWithGitLab(t *testing.T) {
+ cases := []struct {
+ Description string
+ Command command.Name
+ ProjectResults []command.ProjectResult
+ VCSHost models.VCSHostType
+ Expected string
+ }{
+ {
+ "multiple successful plans",
+ command.Plan,
+ []command.ProjectResult{
+ {
+ Workspace: "workspace",
+ RepoRelDir: "path",
+ PlanSuccess: &models.PlanSuccess{
+ TerraformOutput: "terraform-output",
+ LockURL: "lock-url",
+ ApplyCmd: "atlantis apply -d path -w workspace",
+ RePlanCmd: "atlantis plan -d path -w workspace",
+ },
+ },
+ {
+ Workspace: "workspace",
+ RepoRelDir: "path2",
+ ProjectName: "projectname",
+ PlanSuccess: &models.PlanSuccess{
+ TerraformOutput: "terraform-output2",
+ LockURL: "lock-url2",
+ ApplyCmd: "atlantis apply -d path2 -w workspace",
+ RePlanCmd: "atlantis plan -d path2 -w workspace",
+ },
+ },
+ },
+ models.Gitlab,
+ `
+Ran Plan for 2 projects:
+
+1. dir: $path$ workspace: $workspace$
+1. project: $projectname$ dir: $path2$ workspace: $workspace$
+---
+
+### 1. dir: $path$ workspace: $workspace$
+$$$diff
+terraform-output
+$$$
+
+* :arrow_forward: To **apply** this plan, comment:
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
+* :repeat: To **plan** this project again, comment:
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
+
+---
+### 2. project: $projectname$ dir: $path2$ workspace: $workspace$
+$$$diff
+terraform-output2
+$$$
+
+* :arrow_forward: To **apply** this plan, comment:
+ $$$shell
+ atlantis apply -d path2 -w workspace
+ $$$
+* :repeat: To **plan** this project again, comment:
+ $$$shell
+ atlantis plan -d path2 -w workspace
+ $$$
+
+---
+### Plan Summary
+
+2 projects, 2 with changes, 0 with no changes, 0 failed
+
+* :fast_forward: To **apply** all unapplied plans from this Merge Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Merge Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
+`,
+ },
+ }
+
+ r := events.NewMarkdownRenderer(
+ false, // gitlabSupportsCommonMark
+ false, // disableApplyAll
+ false, // disableApply
+ false, // disableMarkdownFolding
+ true, // disableRepoLocking
+ false, // enableDiffMarkdownFormat
+ "", // MarkdownTemplateOverridesDir
+ "atlantis", // executableName
+ false, // hideUnchangedPlanComments
+ )
+ logger := logging.NewNoopLogger(t).WithHistory()
+ logText := "log"
+ logger.Info(logText)
+ for _, c := range cases {
+ t.Run(c.Description, func(t *testing.T) {
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: c.VCSHost,
+ },
+ },
+ },
+ }
+ res := command.Result{
+ ProjectResults: c.ProjectResults,
+ }
+ for _, verbose := range []bool{true, false} {
+ t.Run(c.Description, func(t *testing.T) {
+ cmd := &events.CommentCommand{
+ Name: c.Command,
+ Verbose: verbose,
+ }
+ s := r.Render(ctx, res, cmd)
+ if !verbose {
+ Equals(t, normalize(c.Expected), normalize(s))
+ } else {
+ log := fmt.Sprintf("[INFO] %s", logText)
+ Equals(t, normalize(c.Expected)+
+ fmt.Sprintf("\n<details><summary>Log</summary>\n\n```\n%s\n```\n</details>", log), normalize(s))
+ }
+ })
+ }
+ })
+ }
+}
+
+const tfOutput = `
+An execution plan has been generated and is shown below.
Resource actions are indicated with the following symbols:
~ update in-place
-/+ destroy and then create replacement
@@ -2657,7 +3323,8 @@ var cases = []struct {
},
},
models.Github,
- `Ran Plan for dir: $path$ workspace: $workspace$
+ `
+Ran Plan for dir: $path$ workspace: $workspace$
<details><summary>Show Output</summary>
@@ -2845,11 +3512,13 @@ Terraform will perform the following actions:
Plan: 1 to add, 2 to change, 1 to destroy.
$$$
+
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
-
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
Plan: 1 to add, 2 to change, 1 to destroy.
`,
},
@@ -2867,19 +3536,38 @@ func TestRenderProjectResultsWithEnableDiffMarkdownFormat(t *testing.T) {
"atlantis", // executableName
false, // hideUnchangedPlanComments
)
+ logger := logging.NewNoopLogger(t).WithHistory()
+ logText := "log"
+ logger.Info(logText)
for _, c := range cases {
t.Run(c.Description, func(t *testing.T) {
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: models.Github,
+ },
+ },
+ },
+ }
res := command.Result{
ProjectResults: c.ProjectResults,
}
for _, verbose := range []bool{true, false} {
t.Run(c.Description, func(t *testing.T) {
- s := r.Render(res, c.Command, "", "log", verbose, c.VCSHost)
+ cmd := &events.CommentCommand{
+ Name: c.Command,
+ Verbose: verbose,
+ }
+ s := r.Render(ctx, res, cmd)
if !verbose {
Equals(t, normalize(c.Expected), normalize(s))
} else {
- Equals(t, normalize(c.Expected+"\n<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>"), normalize(s))
+ log := fmt.Sprintf("[INFO] %s", logText)
+ Equals(t, normalize(c.Expected)+
+ fmt.Sprintf("\n<details><summary>Log</summary>\n\n```\n%s\n```\n</details>", log), normalize(s))
}
})
}
@@ -2903,17 +3591,34 @@ func BenchmarkRenderProjectResultsWithEnableDiffMarkdownFormat(b *testing.B) {
"atlantis", // executableName
false, // hideUnchangedPlanComments
)
+ logger := logging.NewNoopLogger(b).WithHistory()
+ logText := "log"
+ logger.Info(logText)
for _, c := range cases {
b.Run(c.Description, func(b *testing.B) {
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: c.VCSHost,
+ },
+ },
+ },
+ }
res := command.Result{
ProjectResults: c.ProjectResults,
}
for _, verbose := range []bool{true, false} {
b.Run(fmt.Sprintf("verbose %t", verbose), func(b *testing.B) {
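+ // Build the CommentCommand once, outside the timed loop, so only Render is measured.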
+ cmd := &events.CommentCommand{
+ Name: c.Command,
+ Verbose: verbose,
+ }
b.ReportAllocs()
for i := 0; i < b.N; i++ {
- render = r.Render(res, c.Command, "", "log", verbose, c.VCSHost)
+ render = r.Render(ctx, res, cmd)
}
Render = render
})
@@ -2970,11 +3675,13 @@ func TestRenderProjectResultsHideUnchangedPlans(t *testing.T) {
},
},
models.Github,
- `Ran Plan for 3 projects:
+ `
+Ran Plan for 3 projects:
1. dir: $path$ workspace: $workspace$
1. project: $projectname$ dir: $path2$ workspace: $workspace$
1. project: $projectname2$ dir: $path3$ workspace: $workspace$
+---
### 1. dir: $path$ workspace: $workspace$
$$$diff
@@ -2982,10 +3689,14 @@ terraform-output
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path -w workspace$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+ $$$shell
+ atlantis apply -d path -w workspace
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path -w workspace$
+ $$$shell
+ atlantis plan -d path -w workspace
+ $$$
---
### 3. project: $projectname2$ dir: $path3$ workspace: $workspace$
@@ -2994,20 +3705,28 @@ terraform-output3
$$$
* :arrow_forward: To **apply** this plan, comment:
- * $atlantis apply -d path3 -w workspace$
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url3)
+ $$$shell
+ atlantis apply -d path3 -w workspace
+ $$$
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url3)
* :repeat: To **plan** this project again, comment:
- * $atlantis plan -d path3 -w workspace$
+ $$$shell
+ atlantis plan -d path3 -w workspace
+ $$$
---
### Plan Summary
3 projects, 2 with changes, 1 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
`,
},
{
@@ -3049,37 +3768,64 @@ $$$
},
},
models.Github,
- `Ran Plan for 3 projects:
+ `
+Ran Plan for 3 projects:
1. dir: $path$ workspace: $workspace$
1. project: $projectname$ dir: $path2$ workspace: $workspace$
1. project: $projectname2$ dir: $path3$ workspace: $workspace$
+---
### Plan Summary
3 projects, 0 with changes, 3 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * $atlantis apply$
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * $atlantis unlock$
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+ $$$shell
+ atlantis apply
+ $$$
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+ $$$shell
+ atlantis unlock
+ $$$
`,
},
}
r := events.NewMarkdownRenderer(false, false, false, false, false, false, "", "atlantis", true)
+ logger := logging.NewNoopLogger(t).WithHistory()
+ logText := "log"
+ logger.Info(logText)
+
for _, c := range cases {
t.Run(c.Description, func(t *testing.T) {
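+ // Same context wiring as the tests above; the VCS host type comes from the test case.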
+ ctx := &command.Context{
+ Log: logger,
+ Pull: models.PullRequest{
+ BaseRepo: models.Repo{
+ VCSHost: models.VCSHost{
+ Type: c.VCSHost,
+ },
+ },
+ },
+ }
res := command.Result{
ProjectResults: c.ProjectResults,
}
for _, verbose := range []bool{true, false} {
t.Run(c.Description, func(t *testing.T) {
- s := r.Render(res, c.Command, c.SubCommand, "log", verbose, c.VCSHost)
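+ // The subcommand is forwarded through SubName on the CommentCommand.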
+ cmd := &events.CommentCommand{
+ Name: c.Command,
+ SubName: c.SubCommand,
+ Verbose: verbose,
+ }
+ s := r.Render(ctx, res, cmd)
if !verbose {
Equals(t, normalize(c.Expected), normalize(s))
} else {
- Equals(t, normalize(c.Expected+"\n<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>"), normalize(s))
+ log := fmt.Sprintf("[INFO] %s", logText)
+ Equals(t, normalize(c.Expected)+
+ fmt.Sprintf("\nLog \n\n\n```\n%s\n```\n
", log), normalize(s))
}
})
}
diff --git a/server/events/mock_workingdir_test.go b/server/events/mock_workingdir_test.go
index 30b344ea3a..c2e070cfed 100644
--- a/server/events/mock_workingdir_test.go
+++ b/server/events/mock_workingdir_test.go
@@ -6,6 +6,7 @@ package events
import (
pegomock "github.com/petergtz/pegomock/v4"
models "github.com/runatlantis/atlantis/server/events/models"
+ logging "github.com/runatlantis/atlantis/server/logging"
"reflect"
"time"
)
@@ -25,11 +26,11 @@ func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir {
func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh }
func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail }
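+// Regenerated: every WorkingDir method now takes the request-scoped logger as its first parameter.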
-func (mock *MockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) {
+func (mock *MockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) {
if mock == nil {
panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
}
- params := []pegomock.Param{headRepo, p, workspace}
+ params := []pegomock.Param{logger, headRepo, p, workspace}
result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
var ret0 string
var ret1 bool
@@ -48,11 +49,11 @@ func (mock *MockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, wo
return ret0, ret1, ret2
}
-func (mock *MockWorkingDir) Delete(r models.Repo, p models.PullRequest) error {
+func (mock *MockWorkingDir) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error {
if mock == nil {
panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
}
- params := []pegomock.Param{r, p}
+ params := []pegomock.Param{logger, r, p}
result := pegomock.GetGenericMockFrom(mock).Invoke("Delete", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
var ret0 error
if len(result) != 0 {
@@ -63,11 +64,11 @@ func (mock *MockWorkingDir) Delete(r models.Repo, p models.PullRequest) error {
return ret0
}
-func (mock *MockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error {
+func (mock *MockWorkingDir) DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) error {
if mock == nil {
panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
}
- params := []pegomock.Param{r, p, workspace}
+ params := []pegomock.Param{logger, r, p, workspace}
result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteForWorkspace", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
var ret0 error
if len(result) != 0 {
@@ -78,11 +79,11 @@ func (mock *MockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullReque
return ret0
}
-func (mock *MockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error {
+func (mock *MockWorkingDir) DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error {
if mock == nil {
panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
}
- params := []pegomock.Param{r, p, workspace, path, projectName}
+ params := []pegomock.Param{logger, r, p, workspace, path, projectName}
result := pegomock.GetGenericMockFrom(mock).Invoke("DeletePlan", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
var ret0 error
if len(result) != 0 {
@@ -93,11 +94,11 @@ func (mock *MockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, work
return ret0
}
-func (mock *MockWorkingDir) GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) ([]string, error) {
+func (mock *MockWorkingDir) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) ([]string, error) {
if mock == nil {
panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
}
- params := []pegomock.Param{r, p, workspace}
+ params := []pegomock.Param{logger, r, p, workspace}
result := pegomock.GetGenericMockFrom(mock).Invoke("GetGitUntrackedFiles", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
var ret0 []string
var ret1 error
@@ -150,11 +151,11 @@ func (mock *MockWorkingDir) GetWorkingDir(r models.Repo, p models.PullRequest, w
return ret0, ret1
}
-func (mock *MockWorkingDir) HasDiverged(cloneDir string) bool {
+func (mock *MockWorkingDir) HasDiverged(logger logging.SimpleLogging, cloneDir string) bool {
if mock == nil {
panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
}
- params := []pegomock.Param{cloneDir}
+ params := []pegomock.Param{logger, cloneDir}
result := pegomock.GetGenericMockFrom(mock).Invoke("HasDiverged", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()})
var ret0 bool
if len(result) != 0 {
@@ -210,8 +211,8 @@ type VerifierMockWorkingDir struct {
timeout time.Duration
}
-func (verifier *VerifierMockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_Clone_OngoingVerification {
- params := []pegomock.Param{headRepo, p, workspace}
+func (verifier *VerifierMockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_Clone_OngoingVerification {
+ params := []pegomock.Param{logger, headRepo, p, workspace}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clone", params, verifier.timeout)
return &MockWorkingDir_Clone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -221,32 +222,36 @@ type MockWorkingDir_Clone_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) {
- headRepo, p, workspace := c.GetAllCapturedArguments()
- return headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1]
+func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) {
+ logger, headRepo, p, workspace := c.GetAllCapturedArguments()
+ return logger[len(logger)-1], headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1]
}
-func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) {
+func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
- _param0 = make([]models.Repo, len(c.methodInvocations))
+ _param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
for u, param := range params[0] {
- _param0[u] = param.(models.Repo)
+ _param0[u] = param.(logging.SimpleLogging)
}
- _param1 = make([]models.PullRequest, len(c.methodInvocations))
+ _param1 = make([]models.Repo, len(c.methodInvocations))
for u, param := range params[1] {
- _param1[u] = param.(models.PullRequest)
+ _param1[u] = param.(models.Repo)
}
- _param2 = make([]string, len(c.methodInvocations))
+ _param2 = make([]models.PullRequest, len(c.methodInvocations))
for u, param := range params[2] {
- _param2[u] = param.(string)
+ _param2[u] = param.(models.PullRequest)
+ }
+ _param3 = make([]string, len(c.methodInvocations))
+ for u, param := range params[3] {
+ _param3[u] = param.(string)
}
}
return
}
-func (verifier *VerifierMockWorkingDir) Delete(r models.Repo, p models.PullRequest) *MockWorkingDir_Delete_OngoingVerification {
- params := []pegomock.Param{r, p}
+func (verifier *VerifierMockWorkingDir) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) *MockWorkingDir_Delete_OngoingVerification {
+ params := []pegomock.Param{logger, r, p}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Delete", params, verifier.timeout)
return &MockWorkingDir_Delete_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -256,28 +261,32 @@ type MockWorkingDir_Delete_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *MockWorkingDir_Delete_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) {
- r, p := c.GetAllCapturedArguments()
- return r[len(r)-1], p[len(p)-1]
+func (c *MockWorkingDir_Delete_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest) {
+ logger, r, p := c.GetAllCapturedArguments()
+ return logger[len(logger)-1], r[len(r)-1], p[len(p)-1]
}
-func (c *MockWorkingDir_Delete_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) {
+func (c *MockWorkingDir_Delete_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
- _param0 = make([]models.Repo, len(c.methodInvocations))
+ _param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
for u, param := range params[0] {
- _param0[u] = param.(models.Repo)
+ _param0[u] = param.(logging.SimpleLogging)
}
- _param1 = make([]models.PullRequest, len(c.methodInvocations))
+ _param1 = make([]models.Repo, len(c.methodInvocations))
for u, param := range params[1] {
- _param1[u] = param.(models.PullRequest)
+ _param1[u] = param.(models.Repo)
+ }
+ _param2 = make([]models.PullRequest, len(c.methodInvocations))
+ for u, param := range params[2] {
+ _param2[u] = param.(models.PullRequest)
}
}
return
}
-func (verifier *VerifierMockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_DeleteForWorkspace_OngoingVerification {
- params := []pegomock.Param{r, p, workspace}
+func (verifier *VerifierMockWorkingDir) DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_DeleteForWorkspace_OngoingVerification {
+ params := []pegomock.Param{logger, r, p, workspace}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteForWorkspace", params, verifier.timeout)
return &MockWorkingDir_DeleteForWorkspace_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -287,32 +296,36 @@ type MockWorkingDir_DeleteForWorkspace_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) {
- r, p, workspace := c.GetAllCapturedArguments()
- return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1]
+func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) {
+ logger, r, p, workspace := c.GetAllCapturedArguments()
+ return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1]
}
-func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) {
+func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
- _param0 = make([]models.Repo, len(c.methodInvocations))
+ _param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
for u, param := range params[0] {
- _param0[u] = param.(models.Repo)
+ _param0[u] = param.(logging.SimpleLogging)
}
- _param1 = make([]models.PullRequest, len(c.methodInvocations))
+ _param1 = make([]models.Repo, len(c.methodInvocations))
for u, param := range params[1] {
- _param1[u] = param.(models.PullRequest)
+ _param1[u] = param.(models.Repo)
}
- _param2 = make([]string, len(c.methodInvocations))
+ _param2 = make([]models.PullRequest, len(c.methodInvocations))
for u, param := range params[2] {
- _param2[u] = param.(string)
+ _param2[u] = param.(models.PullRequest)
+ }
+ _param3 = make([]string, len(c.methodInvocations))
+ for u, param := range params[3] {
+ _param3[u] = param.(string)
}
}
return
}
-func (verifier *VerifierMockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) *MockWorkingDir_DeletePlan_OngoingVerification {
- params := []pegomock.Param{r, p, workspace, path, projectName}
+func (verifier *VerifierMockWorkingDir) DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) *MockWorkingDir_DeletePlan_OngoingVerification {
+ params := []pegomock.Param{logger, r, p, workspace, path, projectName}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeletePlan", params, verifier.timeout)
return &MockWorkingDir_DeletePlan_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -322,25 +335,25 @@ type MockWorkingDir_DeletePlan_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string, string, string) {
- r, p, workspace, path, projectName := c.GetAllCapturedArguments()
- return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1], path[len(path)-1], projectName[len(projectName)-1]
+func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string, string, string) {
+ logger, r, p, workspace, path, projectName := c.GetAllCapturedArguments()
+ return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1], path[len(path)-1], projectName[len(projectName)-1]
}
-func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string, _param3 []string, _param4 []string) {
+func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string, _param4 []string, _param5 []string) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
- _param0 = make([]models.Repo, len(c.methodInvocations))
+ _param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
for u, param := range params[0] {
- _param0[u] = param.(models.Repo)
+ _param0[u] = param.(logging.SimpleLogging)
}
- _param1 = make([]models.PullRequest, len(c.methodInvocations))
+ _param1 = make([]models.Repo, len(c.methodInvocations))
for u, param := range params[1] {
- _param1[u] = param.(models.PullRequest)
+ _param1[u] = param.(models.Repo)
}
- _param2 = make([]string, len(c.methodInvocations))
+ _param2 = make([]models.PullRequest, len(c.methodInvocations))
for u, param := range params[2] {
- _param2[u] = param.(string)
+ _param2[u] = param.(models.PullRequest)
}
_param3 = make([]string, len(c.methodInvocations))
for u, param := range params[3] {
@@ -350,12 +363,16 @@ func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments(
for u, param := range params[4] {
_param4[u] = param.(string)
}
+ _param5 = make([]string, len(c.methodInvocations))
+ for u, param := range params[5] {
+ _param5[u] = param.(string)
+ }
}
return
}
-func (verifier *VerifierMockWorkingDir) GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification {
- params := []pegomock.Param{r, p, workspace}
+func (verifier *VerifierMockWorkingDir) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification {
+ params := []pegomock.Param{logger, r, p, workspace}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetGitUntrackedFiles", params, verifier.timeout)
return &MockWorkingDir_GetGitUntrackedFiles_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -365,25 +382,29 @@ type MockWorkingDir_GetGitUntrackedFiles_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) {
- r, p, workspace := c.GetAllCapturedArguments()
- return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1]
+func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) {
+ logger, r, p, workspace := c.GetAllCapturedArguments()
+ return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1]
}
-func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) {
+func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
- _param0 = make([]models.Repo, len(c.methodInvocations))
+ _param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
for u, param := range params[0] {
- _param0[u] = param.(models.Repo)
+ _param0[u] = param.(logging.SimpleLogging)
}
- _param1 = make([]models.PullRequest, len(c.methodInvocations))
+ _param1 = make([]models.Repo, len(c.methodInvocations))
for u, param := range params[1] {
- _param1[u] = param.(models.PullRequest)
+ _param1[u] = param.(models.Repo)
}
- _param2 = make([]string, len(c.methodInvocations))
+ _param2 = make([]models.PullRequest, len(c.methodInvocations))
for u, param := range params[2] {
- _param2[u] = param.(string)
+ _param2[u] = param.(models.PullRequest)
+ }
+ _param3 = make([]string, len(c.methodInvocations))
+ for u, param := range params[3] {
+ _param3[u] = param.(string)
}
}
return
@@ -455,8 +476,8 @@ func (c *MockWorkingDir_GetWorkingDir_OngoingVerification) GetAllCapturedArgumen
return
}
-func (verifier *VerifierMockWorkingDir) HasDiverged(cloneDir string) *MockWorkingDir_HasDiverged_OngoingVerification {
- params := []pegomock.Param{cloneDir}
+func (verifier *VerifierMockWorkingDir) HasDiverged(logger logging.SimpleLogging, cloneDir string) *MockWorkingDir_HasDiverged_OngoingVerification {
+ params := []pegomock.Param{logger, cloneDir}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "HasDiverged", params, verifier.timeout)
return &MockWorkingDir_HasDiverged_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -466,17 +487,21 @@ type MockWorkingDir_HasDiverged_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetCapturedArguments() string {
- cloneDir := c.GetAllCapturedArguments()
- return cloneDir[len(cloneDir)-1]
+func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string) {
+ logger, cloneDir := c.GetAllCapturedArguments()
+ return logger[len(logger)-1], cloneDir[len(cloneDir)-1]
}
-func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments() (_param0 []string) {
+func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
- _param0 = make([]string, len(c.methodInvocations))
+ _param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
for u, param := range params[0] {
- _param0[u] = param.(string)
+ _param0[u] = param.(logging.SimpleLogging)
+ }
+ _param1 = make([]string, len(c.methodInvocations))
+ for u, param := range params[1] {
+ _param1[u] = param.(string)
}
}
return
diff --git a/server/events/mocks/mock_delete_lock_command.go b/server/events/mocks/mock_delete_lock_command.go
index ce1afd3b72..a8511f28c8 100644
--- a/server/events/mocks/mock_delete_lock_command.go
+++ b/server/events/mocks/mock_delete_lock_command.go
@@ -6,6 +6,7 @@ package mocks
import (
pegomock "github.com/petergtz/pegomock/v4"
models "github.com/runatlantis/atlantis/server/events/models"
+ logging "github.com/runatlantis/atlantis/server/logging"
"reflect"
"time"
)
@@ -25,11 +26,11 @@ func NewMockDeleteLockCommand(options ...pegomock.Option) *MockDeleteLockCommand
func (mock *MockDeleteLockCommand) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh }
func (mock *MockDeleteLockCommand) FailHandler() pegomock.FailHandler { return mock.fail }
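+// DeleteLock and DeleteLocksByPull now receive the logger explicitly as their first argument.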
-func (mock *MockDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, error) {
+func (mock *MockDeleteLockCommand) DeleteLock(logger logging.SimpleLogging, id string) (*models.ProjectLock, error) {
if mock == nil {
panic("mock must not be nil. Use myMock := NewMockDeleteLockCommand().")
}
- params := []pegomock.Param{id}
+ params := []pegomock.Param{logger, id}
result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteLock", params, []reflect.Type{reflect.TypeOf((**models.ProjectLock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
var ret0 *models.ProjectLock
var ret1 error
@@ -44,11 +45,11 @@ func (mock *MockDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, e
return ret0, ret1
}
-func (mock *MockDeleteLockCommand) DeleteLocksByPull(repoFullName string, pullNum int) (int, error) {
+func (mock *MockDeleteLockCommand) DeleteLocksByPull(logger logging.SimpleLogging, repoFullName string, pullNum int) (int, error) {
if mock == nil {
panic("mock must not be nil. Use myMock := NewMockDeleteLockCommand().")
}
- params := []pegomock.Param{repoFullName, pullNum}
+ params := []pegomock.Param{logger, repoFullName, pullNum}
result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteLocksByPull", params, []reflect.Type{reflect.TypeOf((*int)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
var ret0 int
var ret1 error
@@ -100,8 +101,8 @@ type VerifierMockDeleteLockCommand struct {
timeout time.Duration
}
-func (verifier *VerifierMockDeleteLockCommand) DeleteLock(id string) *MockDeleteLockCommand_DeleteLock_OngoingVerification {
- params := []pegomock.Param{id}
+func (verifier *VerifierMockDeleteLockCommand) DeleteLock(logger logging.SimpleLogging, id string) *MockDeleteLockCommand_DeleteLock_OngoingVerification {
+ params := []pegomock.Param{logger, id}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteLock", params, verifier.timeout)
return &MockDeleteLockCommand_DeleteLock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -111,24 +112,28 @@ type MockDeleteLockCommand_DeleteLock_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *MockDeleteLockCommand_DeleteLock_OngoingVerification) GetCapturedArguments() string {
- id := c.GetAllCapturedArguments()
- return id[len(id)-1]
+func (c *MockDeleteLockCommand_DeleteLock_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string) {
+ logger, id := c.GetAllCapturedArguments()
+ return logger[len(logger)-1], id[len(id)-1]
}
-func (c *MockDeleteLockCommand_DeleteLock_OngoingVerification) GetAllCapturedArguments() (_param0 []string) {
+func (c *MockDeleteLockCommand_DeleteLock_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
- _param0 = make([]string, len(c.methodInvocations))
+ _param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
for u, param := range params[0] {
- _param0[u] = param.(string)
+ _param0[u] = param.(logging.SimpleLogging)
+ }
+ _param1 = make([]string, len(c.methodInvocations))
+ for u, param := range params[1] {
+ _param1[u] = param.(string)
}
}
return
}
-func (verifier *VerifierMockDeleteLockCommand) DeleteLocksByPull(repoFullName string, pullNum int) *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification {
- params := []pegomock.Param{repoFullName, pullNum}
+func (verifier *VerifierMockDeleteLockCommand) DeleteLocksByPull(logger logging.SimpleLogging, repoFullName string, pullNum int) *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification {
+ params := []pegomock.Param{logger, repoFullName, pullNum}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteLocksByPull", params, verifier.timeout)
return &MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -138,21 +143,25 @@ type MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification) GetCapturedArguments() (string, int) {
- repoFullName, pullNum := c.GetAllCapturedArguments()
- return repoFullName[len(repoFullName)-1], pullNum[len(pullNum)-1]
+func (c *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string, int) {
+ logger, repoFullName, pullNum := c.GetAllCapturedArguments()
+ return logger[len(logger)-1], repoFullName[len(repoFullName)-1], pullNum[len(pullNum)-1]
}
-func (c *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []int) {
+func (c *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string, _param2 []int) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
- _param0 = make([]string, len(c.methodInvocations))
+ _param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
for u, param := range params[0] {
- _param0[u] = param.(string)
+ _param0[u] = param.(logging.SimpleLogging)
}
- _param1 = make([]int, len(c.methodInvocations))
+ _param1 = make([]string, len(c.methodInvocations))
for u, param := range params[1] {
- _param1[u] = param.(int)
+ _param1[u] = param.(string)
+ }
+ _param2 = make([]int, len(c.methodInvocations))
+ for u, param := range params[2] {
+ _param2[u] = param.(int)
}
}
return
diff --git a/server/events/mocks/mock_event_parsing.go b/server/events/mocks/mock_event_parsing.go
index 22690d2e3e..ad7a75c252 100644
--- a/server/events/mocks/mock_event_parsing.go
+++ b/server/events/mocks/mock_event_parsing.go
@@ -4,6 +4,8 @@
package mocks
import (
+ gitea "code.gitea.io/sdk/gitea"
+ gitea0 "github.com/runatlantis/atlantis/server/events/vcs/gitea"
github "github.com/google/go-github/v59/github"
azuredevops "github.com/mcdafydd/go-azuredevops/azuredevops"
pegomock "github.com/petergtz/pegomock/v4"
@@ -291,6 +293,95 @@ func (mock *MockEventParsing) ParseBitbucketServerPullEvent(body []byte) (models
return ret0, ret1, ret2, ret3, ret4
}
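+// ParseGiteaIssueCommentEvent mocks parsing a Gitea issue-comment payload into the repo, commenter and pull number.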
+func (mock *MockEventParsing) ParseGiteaIssueCommentEvent(event gitea0.GiteaIssueCommentPayload) (models.Repo, models.User, int, error) {
+ if mock == nil {
+ panic("mock must not be nil. Use myMock := NewMockEventParsing().")
+ }
+ params := []pegomock.Param{event}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGiteaIssueCommentEvent", params, []reflect.Type{reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*int)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 models.Repo
+ var ret1 models.User
+ var ret2 int
+ var ret3 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(models.Repo)
+ }
+ if result[1] != nil {
+ ret1 = result[1].(models.User)
+ }
+ if result[2] != nil {
+ ret2 = result[2].(int)
+ }
+ if result[3] != nil {
+ ret3 = result[3].(error)
+ }
+ }
+ return ret0, ret1, ret2, ret3
+}
+
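+// ParseGiteaPull mocks converting a Gitea pull request into Atlantis pull-request and repo models.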
+func (mock *MockEventParsing) ParseGiteaPull(pull *gitea.PullRequest) (models.PullRequest, models.Repo, models.Repo, error) {
+ if mock == nil {
+ panic("mock must not be nil. Use myMock := NewMockEventParsing().")
+ }
+ params := []pegomock.Param{pull}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGiteaPull", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 models.PullRequest
+ var ret1 models.Repo
+ var ret2 models.Repo
+ var ret3 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(models.PullRequest)
+ }
+ if result[1] != nil {
+ ret1 = result[1].(models.Repo)
+ }
+ if result[2] != nil {
+ ret2 = result[2].(models.Repo)
+ }
+ if result[3] != nil {
+ ret3 = result[3].(error)
+ }
+ }
+ return ret0, ret1, ret2, ret3
+}
+
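+// ParseGiteaPullRequestEvent mocks parsing a Gitea pull-request event, including its event type and acting user.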
+func (mock *MockEventParsing) ParseGiteaPullRequestEvent(event gitea.PullRequest) (models.PullRequest, models.PullRequestEventType, models.Repo, models.Repo, models.User, error) {
+ if mock == nil {
+ panic("mock must not be nil. Use myMock := NewMockEventParsing().")
+ }
+ params := []pegomock.Param{event}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGiteaPullRequestEvent", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.PullRequestEventType)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 models.PullRequest
+ var ret1 models.PullRequestEventType
+ var ret2 models.Repo
+ var ret3 models.Repo
+ var ret4 models.User
+ var ret5 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(models.PullRequest)
+ }
+ if result[1] != nil {
+ ret1 = result[1].(models.PullRequestEventType)
+ }
+ if result[2] != nil {
+ ret2 = result[2].(models.Repo)
+ }
+ if result[3] != nil {
+ ret3 = result[3].(models.Repo)
+ }
+ if result[4] != nil {
+ ret4 = result[4].(models.User)
+ }
+ if result[5] != nil {
+ ret5 = result[5].(error)
+ }
+ }
+ return ret0, ret1, ret2, ret3, ret4, ret5
+}
+
func (mock *MockEventParsing) ParseGithubIssueCommentEvent(logger logging.SimpleLogging, comment *github.IssueCommentEvent) (models.Repo, models.User, int, error) {
if mock == nil {
panic("mock must not be nil. Use myMock := NewMockEventParsing().")
@@ -818,6 +909,87 @@ func (c *MockEventParsing_ParseBitbucketServerPullEvent_OngoingVerification) Get
return
}
+func (verifier *VerifierMockEventParsing) ParseGiteaIssueCommentEvent(event gitea0.GiteaIssueCommentPayload) *MockEventParsing_ParseGiteaIssueCommentEvent_OngoingVerification {
+ params := []pegomock.Param{event}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGiteaIssueCommentEvent", params, verifier.timeout)
+ return &MockEventParsing_ParseGiteaIssueCommentEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type MockEventParsing_ParseGiteaIssueCommentEvent_OngoingVerification struct {
+ mock *MockEventParsing
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *MockEventParsing_ParseGiteaIssueCommentEvent_OngoingVerification) GetCapturedArguments() gitea0.GiteaIssueCommentPayload {
+ event := c.GetAllCapturedArguments()
+ return event[len(event)-1]
+}
+
+func (c *MockEventParsing_ParseGiteaIssueCommentEvent_OngoingVerification) GetAllCapturedArguments() (_param0 []gitea0.GiteaIssueCommentPayload) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]gitea0.GiteaIssueCommentPayload, len(c.methodInvocations))
+ for u, param := range params[0] {
+ _param0[u] = param.(gitea0.GiteaIssueCommentPayload)
+ }
+ }
+ return
+}
+
+func (verifier *VerifierMockEventParsing) ParseGiteaPull(pull *gitea.PullRequest) *MockEventParsing_ParseGiteaPull_OngoingVerification {
+ params := []pegomock.Param{pull}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGiteaPull", params, verifier.timeout)
+ return &MockEventParsing_ParseGiteaPull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type MockEventParsing_ParseGiteaPull_OngoingVerification struct {
+ mock *MockEventParsing
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *MockEventParsing_ParseGiteaPull_OngoingVerification) GetCapturedArguments() *gitea.PullRequest {
+ pull := c.GetAllCapturedArguments()
+ return pull[len(pull)-1]
+}
+
+func (c *MockEventParsing_ParseGiteaPull_OngoingVerification) GetAllCapturedArguments() (_param0 []*gitea.PullRequest) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]*gitea.PullRequest, len(c.methodInvocations))
+ for u, param := range params[0] {
+ _param0[u] = param.(*gitea.PullRequest)
+ }
+ }
+ return
+}
+
+func (verifier *VerifierMockEventParsing) ParseGiteaPullRequestEvent(event gitea.PullRequest) *MockEventParsing_ParseGiteaPullRequestEvent_OngoingVerification {
+ params := []pegomock.Param{event}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGiteaPullRequestEvent", params, verifier.timeout)
+ return &MockEventParsing_ParseGiteaPullRequestEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type MockEventParsing_ParseGiteaPullRequestEvent_OngoingVerification struct {
+ mock *MockEventParsing
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *MockEventParsing_ParseGiteaPullRequestEvent_OngoingVerification) GetCapturedArguments() gitea.PullRequest {
+ event := c.GetAllCapturedArguments()
+ return event[len(event)-1]
+}
+
+func (c *MockEventParsing_ParseGiteaPullRequestEvent_OngoingVerification) GetAllCapturedArguments() (_param0 []gitea.PullRequest) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]gitea.PullRequest, len(c.methodInvocations))
+ for u, param := range params[0] {
+ _param0[u] = param.(gitea.PullRequest)
+ }
+ }
+ return
+}
+
func (verifier *VerifierMockEventParsing) ParseGithubIssueCommentEvent(logger logging.SimpleLogging, comment *github.IssueCommentEvent) *MockEventParsing_ParseGithubIssueCommentEvent_OngoingVerification {
params := []pegomock.Param{logger, comment}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGithubIssueCommentEvent", params, verifier.timeout)
diff --git a/server/events/mocks/mock_working_dir.go b/server/events/mocks/mock_working_dir.go
index 55ecc1ca4c..9c162fc4a2 100644
--- a/server/events/mocks/mock_working_dir.go
+++ b/server/events/mocks/mock_working_dir.go
@@ -6,6 +6,7 @@ package mocks
import (
pegomock "github.com/petergtz/pegomock/v4"
models "github.com/runatlantis/atlantis/server/events/models"
+ logging "github.com/runatlantis/atlantis/server/logging"
"reflect"
"time"
)
@@ -25,11 +26,11 @@ func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir {
func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh }
func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail }
-func (mock *MockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) {
+func (mock *MockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) {
if mock == nil {
panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
}
- params := []pegomock.Param{headRepo, p, workspace}
+ params := []pegomock.Param{logger, headRepo, p, workspace}
result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
var ret0 string
var ret1 bool
@@ -48,11 +49,11 @@ func (mock *MockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, wo
return ret0, ret1, ret2
}
-func (mock *MockWorkingDir) Delete(r models.Repo, p models.PullRequest) error {
+func (mock *MockWorkingDir) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error {
if mock == nil {
panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
}
- params := []pegomock.Param{r, p}
+ params := []pegomock.Param{logger, r, p}
result := pegomock.GetGenericMockFrom(mock).Invoke("Delete", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
var ret0 error
if len(result) != 0 {
@@ -63,11 +64,11 @@ func (mock *MockWorkingDir) Delete(r models.Repo, p models.PullRequest) error {
return ret0
}
-func (mock *MockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error {
+func (mock *MockWorkingDir) DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) error {
if mock == nil {
panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
}
- params := []pegomock.Param{r, p, workspace}
+ params := []pegomock.Param{logger, r, p, workspace}
result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteForWorkspace", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
var ret0 error
if len(result) != 0 {
@@ -78,11 +79,11 @@ func (mock *MockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullReque
return ret0
}
-func (mock *MockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error {
+func (mock *MockWorkingDir) DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error {
if mock == nil {
panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
}
- params := []pegomock.Param{r, p, workspace, path, projectName}
+ params := []pegomock.Param{logger, r, p, workspace, path, projectName}
result := pegomock.GetGenericMockFrom(mock).Invoke("DeletePlan", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
var ret0 error
if len(result) != 0 {
@@ -93,11 +94,11 @@ func (mock *MockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, work
return ret0
}
-func (mock *MockWorkingDir) GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) ([]string, error) {
+func (mock *MockWorkingDir) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) ([]string, error) {
if mock == nil {
panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
}
- params := []pegomock.Param{r, p, workspace}
+ params := []pegomock.Param{logger, r, p, workspace}
result := pegomock.GetGenericMockFrom(mock).Invoke("GetGitUntrackedFiles", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
var ret0 []string
var ret1 error
@@ -150,11 +151,11 @@ func (mock *MockWorkingDir) GetWorkingDir(r models.Repo, p models.PullRequest, w
return ret0, ret1
}
-func (mock *MockWorkingDir) HasDiverged(cloneDir string) bool {
+func (mock *MockWorkingDir) HasDiverged(logger logging.SimpleLogging, cloneDir string) bool {
if mock == nil {
panic("mock must not be nil. Use myMock := NewMockWorkingDir().")
}
- params := []pegomock.Param{cloneDir}
+ params := []pegomock.Param{logger, cloneDir}
result := pegomock.GetGenericMockFrom(mock).Invoke("HasDiverged", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()})
var ret0 bool
if len(result) != 0 {
@@ -210,8 +211,8 @@ type VerifierMockWorkingDir struct {
timeout time.Duration
}
-func (verifier *VerifierMockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_Clone_OngoingVerification {
- params := []pegomock.Param{headRepo, p, workspace}
+func (verifier *VerifierMockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_Clone_OngoingVerification {
+ params := []pegomock.Param{logger, headRepo, p, workspace}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clone", params, verifier.timeout)
return &MockWorkingDir_Clone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -221,32 +222,36 @@ type MockWorkingDir_Clone_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) {
- headRepo, p, workspace := c.GetAllCapturedArguments()
- return headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1]
+func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) {
+ logger, headRepo, p, workspace := c.GetAllCapturedArguments()
+ return logger[len(logger)-1], headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1]
}
-func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) {
+func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
- _param0 = make([]models.Repo, len(c.methodInvocations))
+ _param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
for u, param := range params[0] {
- _param0[u] = param.(models.Repo)
+ _param0[u] = param.(logging.SimpleLogging)
}
- _param1 = make([]models.PullRequest, len(c.methodInvocations))
+ _param1 = make([]models.Repo, len(c.methodInvocations))
for u, param := range params[1] {
- _param1[u] = param.(models.PullRequest)
+ _param1[u] = param.(models.Repo)
}
- _param2 = make([]string, len(c.methodInvocations))
+ _param2 = make([]models.PullRequest, len(c.methodInvocations))
for u, param := range params[2] {
- _param2[u] = param.(string)
+ _param2[u] = param.(models.PullRequest)
+ }
+ _param3 = make([]string, len(c.methodInvocations))
+ for u, param := range params[3] {
+ _param3[u] = param.(string)
}
}
return
}
-func (verifier *VerifierMockWorkingDir) Delete(r models.Repo, p models.PullRequest) *MockWorkingDir_Delete_OngoingVerification {
- params := []pegomock.Param{r, p}
+func (verifier *VerifierMockWorkingDir) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) *MockWorkingDir_Delete_OngoingVerification {
+ params := []pegomock.Param{logger, r, p}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Delete", params, verifier.timeout)
return &MockWorkingDir_Delete_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -256,28 +261,32 @@ type MockWorkingDir_Delete_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *MockWorkingDir_Delete_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) {
- r, p := c.GetAllCapturedArguments()
- return r[len(r)-1], p[len(p)-1]
+func (c *MockWorkingDir_Delete_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest) {
+ logger, r, p := c.GetAllCapturedArguments()
+ return logger[len(logger)-1], r[len(r)-1], p[len(p)-1]
}
-func (c *MockWorkingDir_Delete_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) {
+func (c *MockWorkingDir_Delete_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
- _param0 = make([]models.Repo, len(c.methodInvocations))
+ _param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
for u, param := range params[0] {
- _param0[u] = param.(models.Repo)
+ _param0[u] = param.(logging.SimpleLogging)
}
- _param1 = make([]models.PullRequest, len(c.methodInvocations))
+ _param1 = make([]models.Repo, len(c.methodInvocations))
for u, param := range params[1] {
- _param1[u] = param.(models.PullRequest)
+ _param1[u] = param.(models.Repo)
+ }
+ _param2 = make([]models.PullRequest, len(c.methodInvocations))
+ for u, param := range params[2] {
+ _param2[u] = param.(models.PullRequest)
}
}
return
}
-func (verifier *VerifierMockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_DeleteForWorkspace_OngoingVerification {
- params := []pegomock.Param{r, p, workspace}
+func (verifier *VerifierMockWorkingDir) DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_DeleteForWorkspace_OngoingVerification {
+ params := []pegomock.Param{logger, r, p, workspace}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteForWorkspace", params, verifier.timeout)
return &MockWorkingDir_DeleteForWorkspace_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -287,32 +296,36 @@ type MockWorkingDir_DeleteForWorkspace_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) {
- r, p, workspace := c.GetAllCapturedArguments()
- return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1]
+func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) {
+ logger, r, p, workspace := c.GetAllCapturedArguments()
+ return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1]
}
-func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) {
+func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
- _param0 = make([]models.Repo, len(c.methodInvocations))
+ _param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
for u, param := range params[0] {
- _param0[u] = param.(models.Repo)
+ _param0[u] = param.(logging.SimpleLogging)
}
- _param1 = make([]models.PullRequest, len(c.methodInvocations))
+ _param1 = make([]models.Repo, len(c.methodInvocations))
for u, param := range params[1] {
- _param1[u] = param.(models.PullRequest)
+ _param1[u] = param.(models.Repo)
}
- _param2 = make([]string, len(c.methodInvocations))
+ _param2 = make([]models.PullRequest, len(c.methodInvocations))
for u, param := range params[2] {
- _param2[u] = param.(string)
+ _param2[u] = param.(models.PullRequest)
+ }
+ _param3 = make([]string, len(c.methodInvocations))
+ for u, param := range params[3] {
+ _param3[u] = param.(string)
}
}
return
}
-func (verifier *VerifierMockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) *MockWorkingDir_DeletePlan_OngoingVerification {
- params := []pegomock.Param{r, p, workspace, path, projectName}
+func (verifier *VerifierMockWorkingDir) DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) *MockWorkingDir_DeletePlan_OngoingVerification {
+ params := []pegomock.Param{logger, r, p, workspace, path, projectName}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeletePlan", params, verifier.timeout)
return &MockWorkingDir_DeletePlan_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -322,25 +335,25 @@ type MockWorkingDir_DeletePlan_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string, string, string) {
- r, p, workspace, path, projectName := c.GetAllCapturedArguments()
- return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1], path[len(path)-1], projectName[len(projectName)-1]
+func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string, string, string) {
+ logger, r, p, workspace, path, projectName := c.GetAllCapturedArguments()
+ return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1], path[len(path)-1], projectName[len(projectName)-1]
}
-func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string, _param3 []string, _param4 []string) {
+func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string, _param4 []string, _param5 []string) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
- _param0 = make([]models.Repo, len(c.methodInvocations))
+ _param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
for u, param := range params[0] {
- _param0[u] = param.(models.Repo)
+ _param0[u] = param.(logging.SimpleLogging)
}
- _param1 = make([]models.PullRequest, len(c.methodInvocations))
+ _param1 = make([]models.Repo, len(c.methodInvocations))
for u, param := range params[1] {
- _param1[u] = param.(models.PullRequest)
+ _param1[u] = param.(models.Repo)
}
- _param2 = make([]string, len(c.methodInvocations))
+ _param2 = make([]models.PullRequest, len(c.methodInvocations))
for u, param := range params[2] {
- _param2[u] = param.(string)
+ _param2[u] = param.(models.PullRequest)
}
_param3 = make([]string, len(c.methodInvocations))
for u, param := range params[3] {
@@ -350,12 +363,16 @@ func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments(
for u, param := range params[4] {
_param4[u] = param.(string)
}
+ _param5 = make([]string, len(c.methodInvocations))
+ for u, param := range params[5] {
+ _param5[u] = param.(string)
+ }
}
return
}
-func (verifier *VerifierMockWorkingDir) GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification {
- params := []pegomock.Param{r, p, workspace}
+func (verifier *VerifierMockWorkingDir) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification {
+ params := []pegomock.Param{logger, r, p, workspace}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetGitUntrackedFiles", params, verifier.timeout)
return &MockWorkingDir_GetGitUntrackedFiles_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -365,25 +382,29 @@ type MockWorkingDir_GetGitUntrackedFiles_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) {
- r, p, workspace := c.GetAllCapturedArguments()
- return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1]
+func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) {
+ logger, r, p, workspace := c.GetAllCapturedArguments()
+ return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1]
}
-func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) {
+func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
- _param0 = make([]models.Repo, len(c.methodInvocations))
+ _param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
for u, param := range params[0] {
- _param0[u] = param.(models.Repo)
+ _param0[u] = param.(logging.SimpleLogging)
}
- _param1 = make([]models.PullRequest, len(c.methodInvocations))
+ _param1 = make([]models.Repo, len(c.methodInvocations))
for u, param := range params[1] {
- _param1[u] = param.(models.PullRequest)
+ _param1[u] = param.(models.Repo)
}
- _param2 = make([]string, len(c.methodInvocations))
+ _param2 = make([]models.PullRequest, len(c.methodInvocations))
for u, param := range params[2] {
- _param2[u] = param.(string)
+ _param2[u] = param.(models.PullRequest)
+ }
+ _param3 = make([]string, len(c.methodInvocations))
+ for u, param := range params[3] {
+ _param3[u] = param.(string)
}
}
return
@@ -455,8 +476,8 @@ func (c *MockWorkingDir_GetWorkingDir_OngoingVerification) GetAllCapturedArgumen
return
}
-func (verifier *VerifierMockWorkingDir) HasDiverged(cloneDir string) *MockWorkingDir_HasDiverged_OngoingVerification {
- params := []pegomock.Param{cloneDir}
+func (verifier *VerifierMockWorkingDir) HasDiverged(logger logging.SimpleLogging, cloneDir string) *MockWorkingDir_HasDiverged_OngoingVerification {
+ params := []pegomock.Param{logger, cloneDir}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "HasDiverged", params, verifier.timeout)
return &MockWorkingDir_HasDiverged_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -466,17 +487,21 @@ type MockWorkingDir_HasDiverged_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetCapturedArguments() string {
- cloneDir := c.GetAllCapturedArguments()
- return cloneDir[len(cloneDir)-1]
+func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string) {
+ logger, cloneDir := c.GetAllCapturedArguments()
+ return logger[len(logger)-1], cloneDir[len(cloneDir)-1]
}
-func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments() (_param0 []string) {
+func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
- _param0 = make([]string, len(c.methodInvocations))
+ _param0 = make([]logging.SimpleLogging, len(c.methodInvocations))
for u, param := range params[0] {
- _param0[u] = param.(string)
+ _param0[u] = param.(logging.SimpleLogging)
+ }
+ _param1 = make([]string, len(c.methodInvocations))
+ for u, param := range params[1] {
+ _param1[u] = param.(string)
}
}
return
diff --git a/server/events/models/models.go b/server/events/models/models.go
index b98d93e554..4ff5bc339d 100644
--- a/server/events/models/models.go
+++ b/server/events/models/models.go
@@ -304,6 +304,7 @@ const (
BitbucketCloud
BitbucketServer
AzureDevops
+ Gitea
)
func (h VCSHostType) String() string {
@@ -318,6 +319,8 @@ func (h VCSHostType) String() string {
return "BitbucketServer"
case AzureDevops:
return "AzureDevops"
+ case Gitea:
+ return "Gitea"
}
return ""
}
@@ -334,6 +337,8 @@ func NewVCSHostType(t string) (VCSHostType, error) {
return BitbucketServer, nil
case "AzureDevops":
return AzureDevops, nil
+ case "Gitea":
+ return Gitea, nil
}
return -1, fmt.Errorf("%q is not a valid type", t)
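
With the enum value, String, and parser all extended, the new host type round-trips cleanly. A minimal, runnable check (import path taken from the module used elsewhere in this diff):

package main

import (
	"fmt"

	"github.com/runatlantis/atlantis/server/events/models"
)

func main() {
	host, err := models.NewVCSHostType("Gitea")
	if err != nil {
		panic(err)
	}
	fmt.Println(host.String()) // prints "Gitea"
}
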
diff --git a/server/events/post_workflow_hooks_command_runner.go b/server/events/post_workflow_hooks_command_runner.go
index 5e36794572..e1f7f10a9d 100644
--- a/server/events/post_workflow_hooks_command_runner.go
+++ b/server/events/post_workflow_hooks_command_runner.go
@@ -37,18 +37,10 @@ type DefaultPostWorkflowHooksCommandRunner struct {
}
// RunPostHooks runs post_workflow_hooks after a plan/apply has completed
-func (w *DefaultPostWorkflowHooksCommandRunner) RunPostHooks(
- ctx *command.Context, cmd *CommentCommand,
-) error {
- pull := ctx.Pull
- baseRepo := pull.BaseRepo
- headRepo := ctx.HeadRepo
- user := ctx.User
- log := ctx.Log
-
+func (w *DefaultPostWorkflowHooksCommandRunner) RunPostHooks(ctx *command.Context, cmd *CommentCommand) error {
postWorkflowHooks := make([]*valid.WorkflowHook, 0)
for _, repo := range w.GlobalCfg.Repos {
- if repo.IDMatches(baseRepo.ID()) && repo.BranchMatches(pull.BaseBranch) && len(repo.PostWorkflowHooks) > 0 {
+ if repo.IDMatches(ctx.Pull.BaseRepo.ID()) && repo.BranchMatches(ctx.Pull.BaseBranch) && len(repo.PostWorkflowHooks) > 0 {
postWorkflowHooks = append(postWorkflowHooks, repo.PostWorkflowHooks...)
}
}
@@ -58,16 +50,16 @@ func (w *DefaultPostWorkflowHooksCommandRunner) RunPostHooks(
return nil
}
- log.Debug("post-hooks configured, running...")
+ ctx.Log.Debug("post-hooks configured, running...")
- unlockFn, err := w.WorkingDirLocker.TryLock(baseRepo.FullName, pull.Num, DefaultWorkspace, DefaultRepoRelDir)
+ unlockFn, err := w.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, DefaultWorkspace, DefaultRepoRelDir)
if err != nil {
return err
}
- log.Debug("got workspace lock")
+ ctx.Log.Debug("got workspace lock")
defer unlockFn()
- repoDir, _, err := w.WorkingDir.Clone(headRepo, pull, DefaultWorkspace)
+ repoDir, _, err := w.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace)
if err != nil {
return err
}
@@ -79,11 +71,11 @@ func (w *DefaultPostWorkflowHooksCommandRunner) RunPostHooks(
err = w.runHooks(
models.WorkflowHookCommandContext{
- BaseRepo: baseRepo,
- HeadRepo: headRepo,
- Log: log,
- Pull: pull,
- User: user,
+ BaseRepo: ctx.Pull.BaseRepo,
+ HeadRepo: ctx.HeadRepo,
+ Log: ctx.Log,
+ Pull: ctx.Pull,
+ User: ctx.User,
Verbose: false,
EscapedCommentArgs: escapedArgs,
CommandName: cmd.Name.String(),
@@ -123,12 +115,12 @@ func (w *DefaultPostWorkflowHooksCommandRunner) runHooks(
ctx.HookID = uuid.NewString()
shell := hook.Shell
if shell == "" {
- ctx.Log.Debug("Setting shell to default: %q", shell)
+ ctx.Log.Debug("Setting shell to default: '%s'", shell)
shell = "sh"
}
shellArgs := hook.ShellArgs
if shellArgs == "" {
- ctx.Log.Debug("Setting shellArgs to default: %q", shellArgs)
+ ctx.Log.Debug("Setting shellArgs to default: '%s'", shellArgs)
shellArgs = "-c"
}
url, err := w.Router.GenerateProjectWorkflowHookURL(ctx.HookID)
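
Every WorkingDir call in this runner now passes the command context's logger explicitly instead of capturing it in a local alias. A sketch of the updated call shape from inside a runner method; the semantics of the bool return are an assumption, since the diff only ever stubs it as false:

// ctx is the *command.Context available inside RunPostHooks (sketch).
repoDir, _, err := w.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace)
if err != nil {
	return err
}
ctx.Log.Debug("cloned repo for post-workflow hooks into %s", repoDir)
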
diff --git a/server/events/post_workflow_hooks_command_runner_test.go b/server/events/post_workflow_hooks_command_runner_test.go
index 38cd5ee9ec..29996d8028 100644
--- a/server/events/post_workflow_hooks_command_runner_test.go
+++ b/server/events/post_workflow_hooks_command_runner_test.go
@@ -140,8 +140,10 @@ func TestRunPostHooks_Clone(t *testing.T) {
postWh.GlobalCfg = globalCfg
- When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
+ When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](),
Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
@@ -180,7 +182,8 @@ func TestRunPostHooks_Clone(t *testing.T) {
whPostWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](),
Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir))
postWhWorkingDirLocker.VerifyWasCalled(Never()).TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, "path")
- postWhWorkingDir.VerifyWasCalled(Never()).Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)
+ postWhWorkingDir.VerifyWasCalled(Never()).Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))
})
t.Run("error locking work dir", func(t *testing.T) {
postWorkflowHooksSetup(t)
@@ -198,12 +201,14 @@ func TestRunPostHooks_Clone(t *testing.T) {
postWh.GlobalCfg = globalCfg
- When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(func() {}, errors.New("some error"))
+ When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(func() {}, errors.New("some error"))
err := postWh.RunPostHooks(ctx, planCmd)
Assert(t, err != nil, "error not nil")
- postWhWorkingDir.VerifyWasCalled(Never()).Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)
+ postWhWorkingDir.VerifyWasCalled(Never()).Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))
whPostWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](),
Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir))
})
@@ -229,8 +234,10 @@ func TestRunPostHooks_Clone(t *testing.T) {
postWh.GlobalCfg = globalCfg
- When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, errors.New("some error"))
+ When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, errors.New("some error"))
err := postWh.RunPostHooks(ctx, planCmd)
@@ -262,8 +269,10 @@ func TestRunPostHooks_Clone(t *testing.T) {
postWh.GlobalCfg = globalCfg
- When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
+ When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand),
Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, errors.New("some error"))
@@ -302,8 +311,10 @@ func TestRunPostHooks_Clone(t *testing.T) {
postWh.GlobalCfg = globalCfg
- When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
+ When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand),
Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
@@ -336,8 +347,10 @@ func TestRunPostHooks_Clone(t *testing.T) {
postWh.GlobalCfg = globalCfg
- When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
+ When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShell.RunCommand),
Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
@@ -370,8 +383,10 @@ func TestRunPostHooks_Clone(t *testing.T) {
postWh.GlobalCfg = globalCfg
- When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
+ When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand),
Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
@@ -404,16 +419,19 @@ func TestRunPostHooks_Clone(t *testing.T) {
postWh.GlobalCfg = globalCfg
- When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
- When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](),
- Eq(testHookWithShellandShellArgs.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
+ When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
+ When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShellandShellArgs.RunCommand),
+ Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
err := postWh.RunPostHooks(ctx, planCmd)
Ok(t, err)
whPostWorkflowHookRunner.VerifyWasCalledOnce().Run(Any[models.WorkflowHookCommandContext](),
- Eq(testHookWithShellandShellArgs.RunCommand), Eq(testHookWithShellandShellArgs.Shell), Eq(testHookWithShellandShellArgs.ShellArgs), Eq(repoDir))
+ Eq(testHookWithShellandShellArgs.RunCommand), Eq(testHookWithShellandShellArgs.Shell),
+ Eq(testHookWithShellandShellArgs.ShellArgs), Eq(repoDir))
Assert(t, *unlockCalled == true, "unlock function called")
})
@@ -438,8 +456,10 @@ func TestRunPostHooks_Clone(t *testing.T) {
preWh.GlobalCfg = globalCfg
- When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
+ When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](),
Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
@@ -472,10 +492,12 @@ func TestRunPostHooks_Clone(t *testing.T) {
preWh.GlobalCfg = globalCfg
- When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
- When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](),
- Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
+ When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
+ When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand),
+ Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
err := preWh.RunPreHooks(ctx, applyCmd)
@@ -506,10 +528,12 @@ func TestRunPostHooks_Clone(t *testing.T) {
preWh.GlobalCfg = globalCfg
- When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
- When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](),
- Eq(testHookWithPlanApplyCommands.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
+ When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
+ When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanApplyCommands.RunCommand),
+ Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
err := preWh.RunPreHooks(ctx, planCmd)
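
The pattern repeated throughout these tests follows from a pegomock rule: once any argument uses a matcher, every argument must be one, which is why adding Any[logging.SimpleLogging]() forced the previously bare values into Eq(...) wrappers. A condensed sketch, using the same dot-imports as this file:

// Mixing a matcher with bare values typically fails at run time with an
// invalid-use-of-matchers panic:
//   When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), testdata.GithubRepo, newPull, events.DefaultWorkspace))

// Correct: wrap the exact values so all four arguments are matchers.
When(postWhWorkingDir.Clone(
	Any[logging.SimpleLogging](), // accept whatever logger the runner threads through
	Eq(testdata.GithubRepo),      // exact repo
	Eq(newPull),                  // exact pull request
	Eq(events.DefaultWorkspace),  // exact workspace
)).ThenReturn(repoDir, false, nil)
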
diff --git a/server/events/pre_workflow_hooks_command_runner.go b/server/events/pre_workflow_hooks_command_runner.go
index 17b9864757..0e81f15ab9 100644
--- a/server/events/pre_workflow_hooks_command_runner.go
+++ b/server/events/pre_workflow_hooks_command_runner.go
@@ -38,15 +38,9 @@ type DefaultPreWorkflowHooksCommandRunner struct {
// RunPreHooks runs pre_workflow_hooks when PR is opened or updated.
func (w *DefaultPreWorkflowHooksCommandRunner) RunPreHooks(ctx *command.Context, cmd *CommentCommand) error {
- pull := ctx.Pull
- baseRepo := pull.BaseRepo
- headRepo := ctx.HeadRepo
- user := ctx.User
- log := ctx.Log
-
preWorkflowHooks := make([]*valid.WorkflowHook, 0)
for _, repo := range w.GlobalCfg.Repos {
- if repo.IDMatches(baseRepo.ID()) && len(repo.PreWorkflowHooks) > 0 {
+ if repo.IDMatches(ctx.Pull.BaseRepo.ID()) && len(repo.PreWorkflowHooks) > 0 {
preWorkflowHooks = append(preWorkflowHooks, repo.PreWorkflowHooks...)
}
}
@@ -56,16 +50,16 @@ func (w *DefaultPreWorkflowHooksCommandRunner) RunPreHooks(ctx *command.Context,
return nil
}
- log.Debug("pre-hooks configured, running...")
+ ctx.Log.Debug("pre-hooks configured, running...")
- unlockFn, err := w.WorkingDirLocker.TryLock(baseRepo.FullName, pull.Num, DefaultWorkspace, DefaultRepoRelDir)
+ unlockFn, err := w.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, DefaultWorkspace, DefaultRepoRelDir)
if err != nil {
return err
}
- log.Debug("got workspace lock")
+ ctx.Log.Debug("got workspace lock")
defer unlockFn()
- repoDir, _, err := w.WorkingDir.Clone(headRepo, pull, DefaultWorkspace)
+ repoDir, _, err := w.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace)
if err != nil {
return err
}
@@ -89,11 +83,11 @@ func (w *DefaultPreWorkflowHooksCommandRunner) RunPreHooks(ctx *command.Context,
err = w.runHooks(
models.WorkflowHookCommandContext{
- BaseRepo: baseRepo,
- HeadRepo: headRepo,
- Log: log,
- Pull: pull,
- User: user,
+ BaseRepo: ctx.Pull.BaseRepo,
+ HeadRepo: ctx.HeadRepo,
+ Log: ctx.Log,
+ Pull: ctx.Pull,
+ User: ctx.User,
Verbose: false,
EscapedCommentArgs: escapedArgs,
CommandName: cmd.Name.String(),
@@ -132,12 +126,12 @@ func (w *DefaultPreWorkflowHooksCommandRunner) runHooks(
ctx.HookID = uuid.NewString()
shell := hook.Shell
if shell == "" {
- ctx.Log.Debug("Setting shell to default: %q", shell)
+ ctx.Log.Debug("Setting shell to default: '%s'", shell)
shell = "sh"
}
shellArgs := hook.ShellArgs
if shellArgs == "" {
- ctx.Log.Debug("Setting shellArgs to default: %q", shellArgs)
+ ctx.Log.Debug("Setting shellArgs to default: '%s'", shellArgs)
shellArgs = "-c"
}
url, err := w.Router.GenerateProjectWorkflowHookURL(ctx.HookID)
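
For orientation, the WorkingDir surface these runners program against now looks roughly like the following. This is inferred from the call sites in this diff rather than copied from the interface file, and the HasDiverged return type is an assumption:

type WorkingDir interface {
	// Clone returns the clone path and whether the merged state changed.
	Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error)
	GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) ([]string, error)
	HasDiverged(logger logging.SimpleLogging, cloneDir string) bool
	DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error
	// ...other methods (e.g. GetWorkingDir) are unchanged by this diff.
}
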
diff --git a/server/events/pre_workflow_hooks_command_runner_test.go b/server/events/pre_workflow_hooks_command_runner_test.go
index 3156797f86..191a8c27dc 100644
--- a/server/events/pre_workflow_hooks_command_runner_test.go
+++ b/server/events/pre_workflow_hooks_command_runner_test.go
@@ -142,8 +142,10 @@ func TestRunPreHooks_Clone(t *testing.T) {
preWh.GlobalCfg = globalCfg
- When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
+ When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand),
Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
@@ -180,9 +182,11 @@ func TestRunPreHooks_Clone(t *testing.T) {
Ok(t, err)
- whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir))
+ whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand),
+ Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir))
preWhWorkingDirLocker.VerifyWasCalled(Never()).TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, "")
- preWhWorkingDir.VerifyWasCalled(Never()).Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)
+ preWhWorkingDir.VerifyWasCalled(Never()).Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))
})
t.Run("error locking work dir", func(t *testing.T) {
@@ -201,13 +205,16 @@ func TestRunPreHooks_Clone(t *testing.T) {
preWh.GlobalCfg = globalCfg
- When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(func() {}, errors.New("some error"))
+ When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(func() {}, errors.New("some error"))
err := preWh.RunPreHooks(ctx, planCmd)
Assert(t, err != nil, "error not nil")
- preWhWorkingDir.VerifyWasCalled(Never()).Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)
- whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir))
+ preWhWorkingDir.VerifyWasCalled(Never()).Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))
+ whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand),
+ Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir))
})
t.Run("error cloning", func(t *testing.T) {
@@ -231,14 +238,17 @@ func TestRunPreHooks_Clone(t *testing.T) {
preWh.GlobalCfg = globalCfg
- When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, errors.New("some error"))
+ When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, errors.New("some error"))
err := preWh.RunPreHooks(ctx, planCmd)
Assert(t, err != nil, "error not nil")
- whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir))
+ whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand),
+ Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir))
Assert(t, *unlockCalled == true, "unlock function called")
})
@@ -263,8 +273,10 @@ func TestRunPreHooks_Clone(t *testing.T) {
preWh.GlobalCfg = globalCfg
- When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
+ When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand),
Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, errors.New("some error"))
@@ -303,14 +315,18 @@ func TestRunPreHooks_Clone(t *testing.T) {
preWh.GlobalCfg = globalCfg
- When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
- When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
+ When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
+ When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](),
+ Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
err := preWh.RunPreHooks(ctx, planCmd)
Ok(t, err)
- whPreWorkflowHookRunner.VerifyWasCalledOnce().Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir))
+ whPreWorkflowHookRunner.VerifyWasCalledOnce().Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand),
+ Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir))
Assert(t, *unlockCalled == true, "unlock function called")
})
@@ -335,8 +351,10 @@ func TestRunPreHooks_Clone(t *testing.T) {
preWh.GlobalCfg = globalCfg
- When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
+ When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShell.RunCommand),
Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
@@ -369,8 +387,10 @@ func TestRunPreHooks_Clone(t *testing.T) {
preWh.GlobalCfg = globalCfg
- When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
+ When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand),
Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
@@ -403,10 +423,12 @@ func TestRunPreHooks_Clone(t *testing.T) {
preWh.GlobalCfg = globalCfg
- When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
- When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](),
- Eq(testHookWithShellandShellArgs.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
+ When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
+ When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShellandShellArgs.RunCommand),
+ Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
err := preWh.RunPreHooks(ctx, planCmd)
@@ -438,10 +460,12 @@ func TestRunPreHooks_Clone(t *testing.T) {
preWh.GlobalCfg = globalCfg
- When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
- When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](),
- Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
+ When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
+ When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand),
+ Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
err := preWh.RunPreHooks(ctx, planCmd)
@@ -472,10 +496,12 @@ func TestRunPreHooks_Clone(t *testing.T) {
preWh.GlobalCfg = globalCfg
- When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
- When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](),
- Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
+ When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
+ When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand),
+ Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
err := preWh.RunPreHooks(ctx, applyCmd)
@@ -506,10 +532,12 @@ func TestRunPreHooks_Clone(t *testing.T) {
preWh.GlobalCfg = globalCfg
- When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
- When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil)
- When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](),
- Eq(testHookWithPlanApplyCommands.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
+ When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace,
+ events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil)
+ When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull),
+ Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil)
+ When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanApplyCommands.RunCommand),
+ Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil)
err := preWh.RunPreHooks(ctx, planCmd)
diff --git a/server/events/project_command_builder.go b/server/events/project_command_builder.go
index 98f1ef9997..41945ca170 100644
--- a/server/events/project_command_builder.go
+++ b/server/events/project_command_builder.go
@@ -11,7 +11,6 @@ import (
"github.com/runatlantis/atlantis/server/core/config/valid"
"github.com/runatlantis/atlantis/server/core/terraform"
- "github.com/runatlantis/atlantis/server/logging"
"github.com/runatlantis/atlantis/server/metrics"
"github.com/pkg/errors"
@@ -57,7 +56,6 @@ func NewInstrumentedProjectCommandBuilder(
IncludeGitUntrackedFiles bool,
AutoDiscoverMode string,
scope tally.Scope,
- logger logging.SimpleLogging,
terraformClient terraform.Client,
) *InstrumentedProjectCommandBuilder {
scope = scope.SubScope("builder")
@@ -89,11 +87,9 @@ func NewInstrumentedProjectCommandBuilder(
IncludeGitUntrackedFiles,
AutoDiscoverMode,
scope,
- logger,
terraformClient,
),
- Logger: logger,
- scope: scope,
+ scope: scope,
}
}
@@ -119,7 +115,6 @@ func NewProjectCommandBuilder(
IncludeGitUntrackedFiles bool,
AutoDiscoverMode string,
scope tally.Scope,
- _ logging.SimpleLogging,
terraformClient terraform.Client,
) *DefaultProjectCommandBuilder {
return &DefaultProjectCommandBuilder{
@@ -262,7 +257,7 @@ func (p *DefaultProjectCommandBuilder) BuildAutoplanCommands(ctx *command.Contex
var autoplanEnabled []command.ProjectContext
for _, projCtx := range projCtxs {
if !projCtx.AutoplanEnabled {
- ctx.Log.Debug("ignoring project at dir %q, workspace: %q because autoplan is disabled", projCtx.RepoRelDir, projCtx.Workspace)
+ ctx.Log.Debug("ignoring project at dir '%s', workspace: '%s' because autoplan is disabled", projCtx.RepoRelDir, projCtx.Workspace)
continue
}
autoplanEnabled = append(autoplanEnabled, projCtx)
@@ -334,7 +329,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex
if p.IncludeGitUntrackedFiles {
ctx.Log.Debug(("'include-git-untracked-files' option is set, getting untracked files"))
- untrackedFiles, err := p.WorkingDir.GetGitUntrackedFiles(ctx.HeadRepo, ctx.Pull, DefaultWorkspace)
+ untrackedFiles, err := p.WorkingDir.GetGitUntrackedFiles(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace)
if err != nil {
return nil, err
}
@@ -402,7 +397,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex
ctx.Log.Debug("got workspace lock")
defer unlockFn()
- repoDir, _, err := p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, workspace)
+ repoDir, _, err := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace)
if err != nil {
return nil, err
}
@@ -411,7 +406,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex
repoCfgFile := p.GlobalCfg.RepoConfigFile(ctx.Pull.BaseRepo.ID())
hasRepoCfg, err := p.ParserValidator.HasRepoCfg(repoDir, repoCfgFile)
if err != nil {
- return nil, errors.Wrapf(err, "looking for %s file in %q", repoCfgFile, repoDir)
+ return nil, errors.Wrapf(err, "looking for '%s' file in '%s'", repoCfgFile, repoDir)
}
var projCtxs []command.ProjectContext
@@ -440,7 +435,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex
if err != nil {
ctx.Log.Warn("error(s) loading project module dependencies: %s", err)
}
- ctx.Log.Debug("moduleInfo for %s (matching %q) = %v", repoDir, p.AutoDetectModuleFiles, moduleInfo)
+ ctx.Log.Debug("moduleInfo for '%s' (matching '%s') = %v", repoDir, p.AutoDetectModuleFiles, moduleInfo)
automerge := p.EnableAutoMerge
parallelApply := p.EnableParallelApply
@@ -467,7 +462,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex
ctx.Log.Info("%d projects are to be planned based on their when_modified config", len(matchingProjects))
for _, mp := range matchingProjects {
- ctx.Log.Debug("determining config for project at dir: %q workspace: %q", mp.Dir, mp.Workspace)
+ ctx.Log.Debug("determining config for project at dir: '%s' workspace: '%s'", mp.Dir, mp.Workspace)
mergedCfg := p.GlobalCfg.MergeProjectCfg(ctx.Log, ctx.Pull.BaseRepo.ID(), mp, repoCfg)
projCtxs = append(projCtxs,
@@ -523,10 +518,11 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex
ctx.Log.Info("automatically determined that there were %d additional projects modified in this pull request: %s",
len(modifiedProjects), modifiedProjects)
for _, mp := range modifiedProjects {
- ctx.Log.Debug("determining config for project at dir: %q", mp.Path)
- pWorkspace, err := p.ProjectFinder.DetermineWorkspaceFromHCL(ctx.Log, repoDir)
+ ctx.Log.Debug("determining config for project at dir: '%s'", mp.Path)
+ absProjectDir := filepath.Join(repoDir, mp.Path)
+ pWorkspace, err := p.ProjectFinder.DetermineWorkspaceFromHCL(ctx.Log, absProjectDir)
if err != nil {
- return nil, errors.Wrapf(err, "looking for Terraform Cloud workspace from configuration %s", repoDir)
+ return nil, errors.Wrapf(err, "looking for Terraform Cloud workspace from configuration %s", absProjectDir)
}
pCfg := p.GlobalCfg.DefaultProjCfg(ctx.Log, ctx.Pull.BaseRepo.ID(), mp.Path, pWorkspace)
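
Beyond the quoting changes, this hunk fixes a real path bug: the HCL workspace lookup previously ran against the repo root, so a Terraform Cloud workspaces block inside a subdirectory project was never inspected. A hypothetical illustration of the corrected scoping (paths invented):

repoDir := "/atlantis-data/repos/org/repo/42/default" // clone root (hypothetical)
mpPath := "prod/network"                              // project path relative to the root
absProjectDir := filepath.Join(repoDir, mpPath)
// HCL is now parsed under .../42/default/prod/network rather than the clone root.
pWorkspace, err := p.ProjectFinder.DetermineWorkspaceFromHCL(ctx.Log, absProjectDir)
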
@@ -574,7 +570,7 @@ func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *command.Cont
defer unlockFn()
ctx.Log.Debug("cloning repository")
- _, _, err = p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, DefaultWorkspace)
+ _, _, err = p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace)
if err != nil {
return pcc, err
}
@@ -595,7 +591,7 @@ func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *command.Cont
if p.IncludeGitUntrackedFiles {
ctx.Log.Debug(("'include-git-untracked-files' option is set, getting untracked files"))
- untrackedFiles, err := p.WorkingDir.GetGitUntrackedFiles(ctx.HeadRepo, ctx.Pull, workspace)
+ untrackedFiles, err := p.WorkingDir.GetGitUntrackedFiles(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace)
if err != nil {
return nil, err
}
@@ -652,7 +648,7 @@ func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *command.Cont
if DefaultWorkspace != workspace {
ctx.Log.Debug("cloning repository with workspace %s", workspace)
- _, _, err = p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, workspace)
+ _, _, err = p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace)
if err != nil {
return pcc, err
}
@@ -682,7 +678,7 @@ func (p *DefaultProjectCommandBuilder) getCfg(ctx *command.Context, projectName
repoCfgFile := p.GlobalCfg.RepoConfigFile(ctx.Pull.BaseRepo.ID())
hasRepoCfg, err := p.ParserValidator.HasRepoCfg(repoDir, repoCfgFile)
if err != nil {
- err = errors.Wrapf(err, "looking for %s file in %q", repoCfgFile, repoDir)
+ err = errors.Wrapf(err, "looking for '%s' file in '%s'", repoCfgFile, repoDir)
return
}
if !hasRepoCfg {
@@ -712,9 +708,9 @@ func (p *DefaultProjectCommandBuilder) getCfg(ctx *command.Context, projectName
}
if len(projectsCfg) == 0 {
if p.SilenceNoProjects && len(repoConfig.Projects) > 0 {
- ctx.Log.Debug("no project with name %q found but silencing the error", projectName)
+ ctx.Log.Debug("no project with name '%s' found but silencing the error", projectName)
} else {
- err = fmt.Errorf("no project with name %q is defined in %s", projectName, repoCfgFile)
+ err = fmt.Errorf("no project with name '%s' is defined in '%s'", projectName, repoCfgFile)
}
return
}
@@ -726,7 +722,7 @@ func (p *DefaultProjectCommandBuilder) getCfg(ctx *command.Context, projectName
return
}
if len(projCfgs) > 1 {
- err = fmt.Errorf("must specify project name: more than one project defined in %s matched dir: %q workspace: %q", repoCfgFile, dir, workspace)
+ err = fmt.Errorf("must specify project name: more than one project defined in '%s' matched dir: '%s' workspace: '%s'", repoCfgFile, dir, workspace)
return
}
projectsCfg = projCfgs
@@ -765,7 +761,7 @@ func (p *DefaultProjectCommandBuilder) buildAllProjectCommandsByPlan(ctx *comman
for _, plan := range plans {
commentCmds, err := p.buildProjectCommandCtx(ctx, commentCmd.CommandName(), commentCmd.SubName, plan.ProjectName, commentCmd.Flags, defaultRepoDir, plan.RepoRelDir, plan.Workspace, commentCmd.Verbose)
if err != nil {
- return nil, errors.Wrapf(err, "building command for dir %q", plan.RepoRelDir)
+ return nil, errors.Wrapf(err, "building command for dir '%s'", plan.RepoRelDir)
}
cmds = append(cmds, commentCmds...)
}
@@ -861,7 +857,7 @@ func (p *DefaultProjectCommandBuilder) buildProjectCommandCtx(ctx *command.Conte
repoRelDir = projCfg.RepoRelDir
workspace = projCfg.Workspace
for _, mp := range matchingProjects {
- ctx.Log.Debug("Merging config for project at dir: %q workspace: %q", mp.Dir, mp.Workspace)
+ ctx.Log.Debug("Merging config for project at dir: '%s' workspace: '%s'", mp.Dir, mp.Workspace)
projCfg = p.GlobalCfg.MergeProjectCfg(ctx.Log, ctx.Pull.BaseRepo.ID(), mp, *repoCfgPtr)
projCtxs = append(projCtxs,
diff --git a/server/events/project_command_builder_internal_test.go b/server/events/project_command_builder_internal_test.go
index fc7c022073..2d45006959 100644
--- a/server/events/project_command_builder_internal_test.go
+++ b/server/events/project_command_builder_internal_test.go
@@ -630,10 +630,11 @@ projects:
})
workingDir := NewMockWorkingDir()
- When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmp, false, nil)
+ When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(tmp, false, nil)
vcsClient := vcsmocks.NewMockClient()
- When(vcsClient.GetModifiedFiles(
- Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil)
+ When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](),
+ Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil)
// Write and parse the global config file.
globalCfgPath := filepath.Join(tmp, "global.yaml")
@@ -671,7 +672,6 @@ projects:
false,
"auto",
statsScope,
- logger,
terraformClient,
)
@@ -845,10 +845,11 @@ projects:
})
workingDir := NewMockWorkingDir()
- When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmp, false, nil)
+ When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(tmp, false, nil)
vcsClient := vcsmocks.NewMockClient()
- When(vcsClient.GetModifiedFiles(
- Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil)
+ When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](),
+ Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil)
// Write and parse the global config file.
globalCfgPath := filepath.Join(tmp, "global.yaml")
@@ -862,7 +863,6 @@ projects:
Ok(t, os.WriteFile(filepath.Join(tmp, "atlantis.yaml"), []byte(c.repoCfg), 0600))
}
- logger := logging.NewNoopLogger(t)
statsScope, _, _ := metrics.NewLoggingScope(logging.NewNoopLogger(t), "atlantis")
terraformClient := mocks.NewMockClient()
@@ -889,7 +889,6 @@ projects:
false,
"auto",
statsScope,
- logger,
terraformClient,
)
@@ -1091,10 +1090,11 @@ workflows:
})
workingDir := NewMockWorkingDir()
- When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmp, false, nil)
+ When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(tmp, false, nil)
vcsClient := vcsmocks.NewMockClient()
- When(vcsClient.GetModifiedFiles(
- Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil)
+ When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](),
+ Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil)
// Write and parse the global config file.
globalCfgPath := filepath.Join(tmp, "global.yaml")
@@ -1136,7 +1136,6 @@ workflows:
false,
"auto",
statsScope,
- logger,
terraformClient,
)
@@ -1246,10 +1245,11 @@ projects:
})
workingDir := NewMockWorkingDir()
- When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmp, false, nil)
+ When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(tmp, false, nil)
vcsClient := vcsmocks.NewMockClient()
- When(vcsClient.GetModifiedFiles(
- Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil)
+ When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](),
+ Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil)
// Write and parse the global config file.
globalCfgPath := filepath.Join(tmp, "global.yaml")
@@ -1289,7 +1289,6 @@ projects:
false,
"auto",
statsScope,
- logger,
terraformClient,
)
@@ -1386,10 +1385,11 @@ projects:
})
workingDir := NewMockWorkingDir()
- When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmp, false, nil)
+ When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(tmp, false, nil)
vcsClient := vcsmocks.NewMockClient()
- When(vcsClient.GetModifiedFiles(
- Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.modifiedFiles, nil)
+ When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](),
+ Any[models.PullRequest]())).ThenReturn(c.modifiedFiles, nil)
// Write and parse the global config file.
globalCfgPath := filepath.Join(tmp, "global.yaml")
@@ -1431,7 +1431,6 @@ projects:
false,
"auto",
statsScope,
- logger,
terraformClient,
)
diff --git a/server/events/project_command_builder_test.go b/server/events/project_command_builder_test.go
index 3bcc0294be..e1e35eb675 100644
--- a/server/events/project_command_builder_test.go
+++ b/server/events/project_command_builder_test.go
@@ -3,6 +3,7 @@ package events_test
import (
"os"
"path/filepath"
+ "sort"
"strings"
"testing"
@@ -48,6 +49,19 @@ var defaultUserConfig = struct {
AutoDiscoverMode: "auto",
}
+func ChangedFiles(dirStructure map[string]interface{}, parent string) []string {
+ var files []string
+ for k, v := range dirStructure {
+ switch v := v.(type) {
+ case map[string]interface{}:
+			// Join with the accumulated parent so nested files keep their full relative path.
+			files = append(files, ChangedFiles(v, filepath.Join(parent, k))...)
+ default:
+ files = append(files, filepath.Join(parent, k))
+ }
+ }
+ return files
+}
+
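
ChangedFiles walks the same nested-map shape that DirStructure consumes and flattens it into repo-relative paths, letting the mocked GetModifiedFiles report every file in a fixture. A small usage sketch (hypothetical fixture; map iteration order is unspecified, so sort before asserting):

files := ChangedFiles(map[string]interface{}{
	"main.tf": nil,
	"modules": map[string]interface{}{
		"vpc": map[string]interface{}{"main.tf": nil},
	},
}, "")
sort.Strings(files)
// files == []string{"main.tf", "modules/vpc/main.tf"}
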
func TestDefaultProjectCommandBuilder_BuildAutoplanCommands(t *testing.T) {
// expCtxFields define the ctx fields we're going to assert on.
// Since we're focused on autoplanning here, we don't validate all the
@@ -57,11 +71,16 @@ func TestDefaultProjectCommandBuilder_BuildAutoplanCommands(t *testing.T) {
RepoRelDir string
Workspace string
}
+ defaultTestDirStructure := map[string]interface{}{
+ "main.tf": nil,
+ }
+
cases := []struct {
- Description string
- AtlantisYAML string
- ServerSideYAML string
- exp []expCtxFields
+ Description string
+ AtlantisYAML string
+ ServerSideYAML string
+ TestDirStructure map[string]interface{}
+ exp []expCtxFields
}{
{
Description: "simple atlantis.yaml",
@@ -70,6 +89,7 @@ version: 3
projects:
- dir: .
`,
+ TestDirStructure: defaultTestDirStructure,
exp: []expCtxFields{
{
ProjectName: "",
@@ -94,6 +114,7 @@ projects:
name: myname
workspace: myworkspace2
`,
+ TestDirStructure: defaultTestDirStructure,
exp: []expCtxFields{
{
ProjectName: "",
@@ -122,6 +143,7 @@ projects:
- dir: .
workspace: myworkspace2
`,
+ TestDirStructure: defaultTestDirStructure,
exp: []expCtxFields{
{
ProjectName: "",
@@ -142,7 +164,68 @@ version: 3
projects:
- dir: mydir
`,
- exp: nil,
+ TestDirStructure: defaultTestDirStructure,
+ exp: nil,
+ },
+ {
+ Description: "workspaces from subdirectories detected",
+ TestDirStructure: map[string]interface{}{
+ "work": map[string]interface{}{
+ "main.tf": `
+terraform {
+ cloud {
+ organization = "atlantis-test"
+ workspaces {
+ name = "test-workspace1"
+ }
+ }
+}`,
+ },
+ "test": map[string]interface{}{
+ "main.tf": `
+terraform {
+ cloud {
+ organization = "atlantis-test"
+ workspaces {
+ name = "test-workspace12"
+ }
+ }
+}`,
+ },
+ },
+ exp: []expCtxFields{
+ {
+ ProjectName: "",
+ RepoRelDir: "test",
+ Workspace: "test-workspace12",
+ },
+ {
+ ProjectName: "",
+ RepoRelDir: "work",
+ Workspace: "test-workspace1",
+ },
+ },
+ },
+ {
+ Description: "workspaces in parent directory are detected",
+ TestDirStructure: map[string]interface{}{
+ "main.tf": `
+terraform {
+ cloud {
+ organization = "atlantis-test"
+ workspaces {
+ name = "test-workspace"
+ }
+ }
+}`,
+ },
+ exp: []expCtxFields{
+ {
+ ProjectName: "",
+ RepoRelDir: ".",
+ Workspace: "test-workspace",
+ },
+ },
},
}
@@ -156,15 +239,13 @@ projects:
for _, c := range cases {
t.Run(c.Description, func(t *testing.T) {
RegisterMockTestingT(t)
- tmpDir := DirStructure(t, map[string]interface{}{
- "main.tf": nil,
- })
-
+ tmpDir := DirStructure(t, c.TestDirStructure)
workingDir := mocks.NewMockWorkingDir()
- When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil)
+ When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(tmpDir, false, nil)
vcsClient := vcsmocks.NewMockClient()
- When(vcsClient.GetModifiedFiles(
- Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil)
+ When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](),
+ Any[models.PullRequest]())).ThenReturn(ChangedFiles(c.TestDirStructure, ""), nil)
if c.AtlantisYAML != "" {
err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600)
Ok(t, err)
@@ -194,7 +275,6 @@ projects:
userConfig.IncludeGitUntrackedFiles,
userConfig.AutoDiscoverMode,
scope,
- logger,
terraformClient,
)
@@ -207,6 +287,17 @@ projects:
})
Ok(t, err)
Equals(t, len(c.exp), len(ctxs))
+
+ // Sort so comparisons are deterministic
+ sort.Slice(ctxs, func(i, j int) bool {
+ if ctxs[i].ProjectName != ctxs[j].ProjectName {
+ return ctxs[i].ProjectName < ctxs[j].ProjectName
+ }
+ if ctxs[i].RepoRelDir != ctxs[j].RepoRelDir {
+ return ctxs[i].RepoRelDir < ctxs[j].RepoRelDir
+ }
+ return ctxs[i].Workspace < ctxs[j].Workspace
+ })
for i, actCtx := range ctxs {
expCtx := c.exp[i]
Equals(t, expCtx.ProjectName, actCtx.ProjectName)
@@ -384,7 +475,7 @@ projects:
dir: .
workspace: myworkspace
`,
- ExpErr: "must specify project name: more than one project defined in atlantis.yaml matched dir: \".\" workspace: \"myworkspace\"",
+ ExpErr: "must specify project name: more than one project defined in 'atlantis.yaml' matched dir: '.' workspace: 'myworkspace'",
},
{
Description: "atlantis.yaml with project flag not matching",
@@ -399,7 +490,7 @@ version: 3
projects:
- dir: .
`,
- ExpErr: "no project with name \"notconfigured\" is defined in atlantis.yaml",
+ ExpErr: "no project with name 'notconfigured' is defined in 'atlantis.yaml'",
},
{
Description: "atlantis.yaml with project flag not matching but silenced",
@@ -511,11 +602,12 @@ projects:
})
workingDir := mocks.NewMockWorkingDir()
- When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil)
+ When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(tmpDir, false, nil)
When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil)
vcsClient := vcsmocks.NewMockClient()
- When(vcsClient.GetModifiedFiles(
- Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil)
+ When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](),
+ Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil)
if c.AtlantisYAML != "" {
err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600)
Ok(t, err)
@@ -550,7 +642,6 @@ projects:
userConfig.IncludeGitUntrackedFiles,
c.AutoDiscoverModeUserCfg,
scope,
- logger,
terraformClient,
)
@@ -700,11 +791,12 @@ projects:
tmpDir := DirStructure(t, c.DirectoryStructure)
workingDir := mocks.NewMockWorkingDir()
- When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil)
+ When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(tmpDir, false, nil)
When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil)
vcsClient := vcsmocks.NewMockClient()
- When(vcsClient.GetModifiedFiles(
- Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil)
+ When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](),
+ Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil)
if c.AtlantisYAML != "" {
err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600)
Ok(t, err)
@@ -739,7 +831,6 @@ projects:
userConfig.IncludeGitUntrackedFiles,
userConfig.AutoDiscoverMode,
scope,
- logger,
terraformClient,
)
@@ -1030,11 +1121,12 @@ projects:
tmpDir := DirStructure(t, c.DirStructure)
workingDir := mocks.NewMockWorkingDir()
- When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil)
+ When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(tmpDir, false, nil)
When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil)
vcsClient := vcsmocks.NewMockClient()
- When(vcsClient.GetModifiedFiles(
- Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil)
+ When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](),
+ Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil)
if c.AtlantisYAML != "" {
err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600)
Ok(t, err)
@@ -1069,7 +1161,6 @@ projects:
userConfig.IncludeGitUntrackedFiles,
userConfig.AutoDiscoverMode,
scope,
- logger,
terraformClient,
)
@@ -1169,7 +1260,6 @@ func TestDefaultProjectCommandBuilder_BuildMultiApply(t *testing.T) {
userConfig.IncludeGitUntrackedFiles,
userConfig.AutoDiscoverMode,
scope,
- logger,
terraformClient,
)
@@ -1221,14 +1311,9 @@ projects:
err := os.WriteFile(filepath.Join(repoDir, valid.DefaultAtlantisFile), []byte(yamlCfg), 0600)
Ok(t, err)
- When(workingDir.Clone(
- Any[models.Repo](),
- Any[models.PullRequest](),
+ When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
Any[string]())).ThenReturn(repoDir, false, nil)
- When(workingDir.GetWorkingDir(
- Any[models.Repo](),
- Any[models.PullRequest](),
- Any[string]())).ThenReturn(repoDir, nil)
+ When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(repoDir, nil)
globalCfgArgs := valid.GlobalCfgArgs{
AllowAllRepoSettings: true,
@@ -1262,7 +1347,6 @@ projects:
userConfig.IncludeGitUntrackedFiles,
userConfig.AutoDiscoverMode,
scope,
- logger,
terraformClient,
)
@@ -1316,11 +1400,12 @@ func TestDefaultProjectCommandBuilder_EscapeArgs(t *testing.T) {
})
workingDir := mocks.NewMockWorkingDir()
- When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil)
+ When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(tmpDir, false, nil)
When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil)
vcsClient := vcsmocks.NewMockClient()
- When(vcsClient.GetModifiedFiles(
- Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil)
+ When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](),
+ Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil)
globalCfgArgs := valid.GlobalCfgArgs{
AllowAllRepoSettings: true,
@@ -1351,7 +1436,6 @@ func TestDefaultProjectCommandBuilder_EscapeArgs(t *testing.T) {
userConfig.IncludeGitUntrackedFiles,
userConfig.AutoDiscoverMode,
scope,
- logger,
terraformClient,
)
@@ -1470,19 +1554,12 @@ projects:
tmpDir := DirStructure(t, testCase.DirStructure)
vcsClient := vcsmocks.NewMockClient()
- When(vcsClient.GetModifiedFiles(
- Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(testCase.ModifiedFiles, nil)
-
+ When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](),
+ Any[models.PullRequest]())).ThenReturn(testCase.ModifiedFiles, nil)
workingDir := mocks.NewMockWorkingDir()
- When(workingDir.Clone(
- Any[models.Repo](),
- Any[models.PullRequest](),
+ When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
Any[string]())).ThenReturn(tmpDir, false, nil)
-
- When(workingDir.GetWorkingDir(
- Any[models.Repo](),
- Any[models.PullRequest](),
- Any[string]())).ThenReturn(tmpDir, nil)
+ When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil)
globalCfgArgs := valid.GlobalCfgArgs{
AllowAllRepoSettings: true,
@@ -1521,7 +1598,6 @@ projects:
userConfig.IncludeGitUntrackedFiles,
userConfig.AutoDiscoverMode,
scope,
- logger,
terraformClient,
)
@@ -1633,7 +1709,6 @@ projects:
userConfig.IncludeGitUntrackedFiles,
userConfig.AutoDiscoverMode,
scope,
- logger,
terraformClient,
)
@@ -1651,7 +1726,8 @@ projects:
})
Ok(t, err)
Equals(t, c.ExpectedCtxs, len(actCtxs))
- workingDir.VerifyWasCalled(c.ExpectedClones).Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())
+ workingDir.VerifyWasCalled(c.ExpectedClones).Clone(Any[logging.SimpleLogging](), Any[models.Repo](),
+ Any[models.PullRequest](), Any[string]())
}
}
@@ -1666,10 +1742,11 @@ func TestDefaultProjectCommandBuilder_WithPolicyCheckEnabled_BuildAutoplanComman
userConfig := defaultUserConfig
workingDir := mocks.NewMockWorkingDir()
- When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil)
+ When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(tmpDir, false, nil)
vcsClient := vcsmocks.NewMockClient()
- When(vcsClient.GetModifiedFiles(
- Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil)
+ When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](),
+ Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil)
globalCfgArgs := valid.GlobalCfgArgs{
AllowAllRepoSettings: false,
@@ -1702,7 +1779,6 @@ func TestDefaultProjectCommandBuilder_WithPolicyCheckEnabled_BuildAutoplanComman
userConfig.IncludeGitUntrackedFiles,
userConfig.AutoDiscoverMode,
scope,
- logger,
terraformClient,
)
@@ -1792,7 +1868,6 @@ func TestDefaultProjectCommandBuilder_BuildVersionCommand(t *testing.T) {
userConfig.IncludeGitUntrackedFiles,
userConfig.AutoDiscoverMode,
scope,
- logger,
terraformClient,
)
@@ -1886,12 +1961,14 @@ func TestDefaultProjectCommandBuilder_BuildPlanCommands_Single_With_RestrictFile
tmpDir := DirStructure(t, c.DirectoryStructure)
workingDir := mocks.NewMockWorkingDir()
- When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil)
+ When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(tmpDir, false, nil)
When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil)
- When(workingDir.GetGitUntrackedFiles(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(c.UntrackedFiles, nil)
+ When(workingDir.GetGitUntrackedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(c.UntrackedFiles, nil)
vcsClient := vcsmocks.NewMockClient()
- When(vcsClient.GetModifiedFiles(
- Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil)
+ When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](),
+ Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil)
if c.AtlantisYAML != "" {
err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600)
Ok(t, err)
@@ -1922,7 +1999,6 @@ func TestDefaultProjectCommandBuilder_BuildPlanCommands_Single_With_RestrictFile
userConfig.IncludeGitUntrackedFiles,
userConfig.AutoDiscoverMode,
scope,
- logger,
terraformClient,
)
@@ -1997,12 +2073,14 @@ func TestDefaultProjectCommandBuilder_BuildPlanCommands_with_IncludeGitUntracked
tmpDir := DirStructure(t, c.DirectoryStructure)
workingDir := mocks.NewMockWorkingDir()
- When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil)
+ When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(tmpDir, false, nil)
When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil)
- When(workingDir.GetGitUntrackedFiles(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(c.UntrackedFiles, nil)
+ When(workingDir.GetGitUntrackedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(c.UntrackedFiles, nil)
vcsClient := vcsmocks.NewMockClient()
- When(vcsClient.GetModifiedFiles(
- Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil)
+ When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](),
+ Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil)
if c.AtlantisYAML != "" {
err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600)
Ok(t, err)
@@ -2033,7 +2111,6 @@ func TestDefaultProjectCommandBuilder_BuildPlanCommands_with_IncludeGitUntracked
userConfig.IncludeGitUntrackedFiles,
userConfig.AutoDiscoverMode,
scope,
- logger,
terraformClient,
)
diff --git a/server/events/project_command_runner.go b/server/events/project_command_runner.go
index 38935aa421..1deba17643 100644
--- a/server/events/project_command_runner.go
+++ b/server/events/project_command_runner.go
@@ -554,7 +554,7 @@ func (p *DefaultProjectCommandRunner) doPlan(ctx command.ProjectContext) (*model
p.WorkingDir.SetCheckForUpstreamChanges()
// Clone is idempotent so okay to run even if the repo was already cloned.
- repoDir, mergedAgain, cloneErr := p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, ctx.Workspace)
+ repoDir, mergedAgain, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace)
if cloneErr != nil {
if unlockErr := lockAttempt.UnlockFn(); unlockErr != nil {
ctx.Log.Err("error unlocking state after plan error: %v", unlockErr)
@@ -667,7 +667,7 @@ func (p *DefaultProjectCommandRunner) doVersion(ctx command.ProjectContext) (ver
func (p *DefaultProjectCommandRunner) doImport(ctx command.ProjectContext) (out *models.ImportSuccess, failure string, err error) {
// Clone is idempotent so okay to run even if the repo was already cloned.
- repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, ctx.Workspace)
+ repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace)
if cloneErr != nil {
return nil, "", cloneErr
}
@@ -713,7 +713,7 @@ func (p *DefaultProjectCommandRunner) doImport(ctx command.ProjectContext) (out
func (p *DefaultProjectCommandRunner) doStateRm(ctx command.ProjectContext) (out *models.StateRmSuccess, failure string, err error) {
// Clone is idempotent so okay to run even if the repo was already cloned.
- repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, ctx.Workspace)
+ repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace)
if cloneErr != nil {
return nil, "", cloneErr
}
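
The hunks above thread the request-scoped `ctx.Log` through `WorkingDir.Clone` instead of reading a logger stored on the struct. A minimal sketch of that pattern in isolation, with illustrative names rather than the Atlantis types:

```go
package main

import "fmt"

// Logger stands in for logging.SimpleLogging.
type Logger interface {
	Debug(format string, args ...any)
}

type stdoutLogger struct{}

func (stdoutLogger) Debug(format string, args ...any) { fmt.Printf(format+"\n", args...) }

// WorkingDir now receives the logger per call, so each clone logs with the
// context of the command that triggered it rather than a server-wide logger.
type WorkingDir interface {
	Clone(log Logger, headRepo string, workspace string) (dir string, mergedAgain bool, err error)
}

type fileWorkspace struct{}

func (fileWorkspace) Clone(log Logger, headRepo string, workspace string) (string, bool, error) {
	log.Debug("cloning %s into workspace %s", headRepo, workspace)
	return "/tmp/" + workspace, false, nil
}

func main() {
	var w WorkingDir = fileWorkspace{}
	_, _, _ = w.Clone(stdoutLogger{}, "owner/repo", "default")
}
```
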
diff --git a/server/events/project_command_runner_test.go b/server/events/project_command_runner_test.go
index 4446a0054d..7b1369bf7a 100644
--- a/server/events/project_command_runner_test.go
+++ b/server/events/project_command_runner_test.go
@@ -63,22 +63,10 @@ func TestDefaultProjectCommandRunner_Plan(t *testing.T) {
}
repoDir := t.TempDir()
- When(mockWorkingDir.Clone(
- Any[models.Repo](),
- Any[models.PullRequest](),
- Any[string](),
- )).ThenReturn(repoDir, false, nil)
- When(mockLocker.TryLock(
- Any[logging.SimpleLogging](),
- Any[models.PullRequest](),
- Any[models.User](),
- Any[string](),
- Any[models.Project](),
- AnyBool(),
- )).ThenReturn(&events.TryLockResponse{
- LockAcquired: true,
- LockKey: "lock-key",
- }, nil)
+ When(mockWorkingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(repoDir, false, nil)
+ When(mockLocker.TryLock(Any[logging.SimpleLogging](), Any[models.PullRequest](), Any[models.User](), Any[string](),
+ Any[models.Project](), AnyBool())).ThenReturn(&events.TryLockResponse{LockAcquired: true, LockKey: "lock-key"}, nil)
expEnvs := map[string]string{
"name": "value",
@@ -317,7 +305,7 @@ func TestDefaultProjectCommandRunner_ApplyDiverged(t *testing.T) {
}
tmp := t.TempDir()
When(mockWorkingDir.GetWorkingDir(ctx.BaseRepo, ctx.Pull, ctx.Workspace)).ThenReturn(tmp, nil)
- When(mockWorkingDir.HasDiverged(tmp)).ThenReturn(true)
+ When(mockWorkingDir.HasDiverged(ctx.Log, tmp)).ThenReturn(true)
res := runner.Apply(ctx)
Equals(t, "Default branch must be rebased onto pull request before running apply.", res.Failure)
@@ -560,22 +548,10 @@ func TestDefaultProjectCommandRunner_RunEnvSteps(t *testing.T) {
}
repoDir := t.TempDir()
- When(mockWorkingDir.Clone(
- Any[models.Repo](),
- Any[models.PullRequest](),
- Any[string](),
- )).ThenReturn(repoDir, false, nil)
- When(mockLocker.TryLock(
- Any[logging.SimpleLogging](),
- Any[models.PullRequest](),
- Any[models.User](),
- Any[string](),
- Any[models.Project](),
- AnyBool(),
- )).ThenReturn(&events.TryLockResponse{
- LockAcquired: true,
- LockKey: "lock-key",
- }, nil)
+ When(mockWorkingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(repoDir, false, nil)
+ When(mockLocker.TryLock(Any[logging.SimpleLogging](), Any[models.PullRequest](), Any[models.User](), Any[string](),
+ Any[models.Project](), AnyBool())).ThenReturn(&events.TryLockResponse{LockAcquired: true, LockKey: "lock-key"}, nil)
ctx := command.ProjectContext{
Log: logging.NewNoopLogger(t),
@@ -714,11 +690,8 @@ func TestDefaultProjectCommandRunner_Import(t *testing.T) {
RePlanCmd: "atlantis plan -d . -- addr id",
}
repoDir := t.TempDir()
- When(mockWorkingDir.Clone(
- Any[models.Repo](),
- Any[models.PullRequest](),
- Any[string](),
- )).ThenReturn(repoDir, false, nil)
+ When(mockWorkingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](),
+ Any[string]())).ThenReturn(repoDir, false, nil)
if c.setup != nil {
c.setup(repoDir, ctx, mockLocker, mockInit, mockImport)
}
diff --git a/server/events/project_locker_test.go b/server/events/project_locker_test.go
index 62be1c40f9..268faf20ee 100644
--- a/server/events/project_locker_test.go
+++ b/server/events/project_locker_test.go
@@ -29,7 +29,7 @@ import (
func TestDefaultProjectLocker_TryLockWhenLocked(t *testing.T) {
var githubClient *vcs.GithubClient
- mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil)
+ mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil, nil)
mockLocker := mocks.NewMockLocker()
locker := events.DefaultProjectLocker{
Locker: mockLocker,
@@ -65,7 +65,7 @@ func TestDefaultProjectLocker_TryLockWhenLocked(t *testing.T) {
func TestDefaultProjectLocker_TryLockWhenLockedSamePull(t *testing.T) {
RegisterMockTestingT(t)
var githubClient *vcs.GithubClient
- mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil)
+ mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil, nil)
mockLocker := mocks.NewMockLocker()
locker := events.DefaultProjectLocker{
Locker: mockLocker,
@@ -104,7 +104,7 @@ func TestDefaultProjectLocker_TryLockWhenLockedSamePull(t *testing.T) {
func TestDefaultProjectLocker_TryLockUnlocked(t *testing.T) {
RegisterMockTestingT(t)
var githubClient *vcs.GithubClient
- mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil)
+ mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil, nil)
mockLocker := mocks.NewMockLocker()
locker := events.DefaultProjectLocker{
Locker: mockLocker,
@@ -142,7 +142,7 @@ func TestDefaultProjectLocker_TryLockUnlocked(t *testing.T) {
func TestDefaultProjectLocker_RepoLocking(t *testing.T) {
var githubClient *vcs.GithubClient
- mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil)
+ mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil, nil)
expProject := models.Project{}
expWorkspace := "default"
expPull := models.PullRequest{Num: 2}
diff --git a/server/events/pull_closed_executor.go b/server/events/pull_closed_executor.go
index 64b929633b..5c005dbc9a 100644
--- a/server/events/pull_closed_executor.go
+++ b/server/events/pull_closed_executor.go
@@ -51,7 +51,6 @@ type PullClosedExecutor struct {
Locker locking.Locker
VCSClient vcs.Client
WorkingDir WorkingDir
- Logger logging.SimpleLogging
Backend locking.Backend
PullClosedTemplate PullCleanupTemplate
LogStreamResourceCleaner ResourceCleaner
@@ -82,7 +81,7 @@ func (p *PullClosedExecutor) CleanUpPull(logger logging.SimpleLogging, repo mode
pullStatus, err := p.Backend.GetPullStatus(pull)
if err != nil {
// Log and continue to clean up other resources.
- p.Logger.Err("retrieving pull status: %s", err)
+ logger.Err("retrieving pull status: %s", err)
}
if pullStatus != nil {
@@ -97,7 +96,7 @@ func (p *PullClosedExecutor) CleanUpPull(logger logging.SimpleLogging, repo mode
}
}
- if err := p.WorkingDir.Delete(repo, pull); err != nil {
+ if err := p.WorkingDir.Delete(logger, repo, pull); err != nil {
return errors.Wrap(err, "cleaning workspace")
}
@@ -111,7 +110,7 @@ func (p *PullClosedExecutor) CleanUpPull(logger logging.SimpleLogging, repo mode
// Delete pull from DB.
if err := p.Backend.DeletePullStatus(pull); err != nil {
- p.Logger.Err("deleting pull from db: %s", err)
+ logger.Err("deleting pull from db: %s", err)
}
// If there are no locks then there's no need to comment.
diff --git a/server/events/pull_closed_executor_test.go b/server/events/pull_closed_executor_test.go
index 1236060d39..df904a1c6f 100644
--- a/server/events/pull_closed_executor_test.go
+++ b/server/events/pull_closed_executor_test.go
@@ -50,7 +50,7 @@ func TestCleanUpPullWorkspaceErr(t *testing.T) {
Backend: db,
}
err = errors.New("err")
- When(w.Delete(testdata.GithubRepo, testdata.Pull)).ThenReturn(err)
+ When(w.Delete(logger, testdata.GithubRepo, testdata.Pull)).ThenReturn(err)
actualErr := pce.CleanUpPull(logger, testdata.GithubRepo, testdata.Pull)
Equals(t, "cleaning workspace: err", actualErr.Error())
}
@@ -271,7 +271,6 @@ func TestCleanUpLogStreaming(t *testing.T) {
VCSClient: client,
PullClosedTemplate: &events.PullClosedEventTemplate{},
LogStreamResourceCleaner: prjCmdOutHandler,
- Logger: logger,
}
locks := []models.ProjectLock{
diff --git a/server/events/pull_updater.go b/server/events/pull_updater.go
index d640e5a374..2fd2b99a16 100644
--- a/server/events/pull_updater.go
+++ b/server/events/pull_updater.go
@@ -29,7 +29,7 @@ func (c *PullUpdater) updatePull(ctx *command.Context, cmd PullCommand, res comm
}
}
- comment := c.MarkdownRenderer.Render(res, cmd.CommandName(), cmd.SubCommandName(), ctx.Log.GetHistory(), cmd.IsVerbose(), ctx.Pull.BaseRepo.VCSHost.Type)
+ comment := c.MarkdownRenderer.Render(ctx, res, cmd)
if err := c.VCSClient.CreateComment(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull.Num, comment, cmd.CommandName().String()); err != nil {
ctx.Log.Err("unable to comment: %s", err)
}
diff --git a/server/events/templates/import_success_unwrapped.tmpl b/server/events/templates/import_success_unwrapped.tmpl
index c8a8a1b19d..08b6336d4d 100644
--- a/server/events/templates/import_success_unwrapped.tmpl
+++ b/server/events/templates/import_success_unwrapped.tmpl
@@ -6,5 +6,7 @@
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `{{.RePlanCmd}}`
+ ```shell
+ {{.RePlanCmd}}
+ ```
{{ end -}}
diff --git a/server/events/templates/import_success_wrapped.tmpl b/server/events/templates/import_success_wrapped.tmpl
index 12711c1d4d..00d9689a38 100644
--- a/server/events/templates/import_success_wrapped.tmpl
+++ b/server/events/templates/import_success_wrapped.tmpl
@@ -8,5 +8,7 @@
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `{{ .RePlanCmd }}`
+ ```shell
+ {{ .RePlanCmd }}
+ ```
{{ end -}}
diff --git a/server/events/templates/log.tmpl b/server/events/templates/log.tmpl
index cb409801c7..305436eebb 100644
--- a/server/events/templates/log.tmpl
+++ b/server/events/templates/log.tmpl
@@ -1,7 +1,7 @@
{{ define "log" -}}
-{{ if .Verbose }}
+{{ if .Verbose -}}
Log
-
+
```
{{.Log}}```
diff --git a/server/events/templates/merged_again.tmpl b/server/events/templates/merged_again.tmpl
index 796afe552a..ece363f19e 100644
--- a/server/events/templates/merged_again.tmpl
+++ b/server/events/templates/merged_again.tmpl
@@ -1,5 +1,5 @@
{{ define "mergedAgain" -}}
-{{ if .MergedAgain }}
+{{ if .MergedAgain -}}
:twisted_rightwards_arrows: Upstream was modified, a new merge was performed.
{{ end -}}
{{ end -}}
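
Most of the template hunks here add `-}}` trim markers. In Go's text/template, `{{-` consumes the whitespace (including the newline) immediately before an action and `-}}` consumes it after, which is how these changes remove stray blank lines from rendered comments. A small self-contained demonstration:

```go
package main

import (
	"os"
	"text/template"
)

func main() {
	// Without "-}}" the if-action would leave its own newline in the output;
	// with it, the rendered text starts directly at the message.
	tmpl := template.Must(template.New("demo").Parse(
		"{{ if .MergedAgain -}}\nUpstream was modified, a new merge was performed.\n{{ end -}}\n"))
	_ = tmpl.Execute(os.Stdout, map[string]bool{"MergedAgain": true})
}
```
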
diff --git a/server/events/templates/multi_project_apply.tmpl b/server/events/templates/multi_project_apply.tmpl
index 50038555b3..2e2b2baa30 100644
--- a/server/events/templates/multi_project_apply.tmpl
+++ b/server/events/templates/multi_project_apply.tmpl
@@ -1,5 +1,5 @@
{{ define "multiProjectApply" -}}
-{{ template "multiProjectHeader" . }}
+{{ template "multiProjectHeader" . -}}
{{ range $i, $result := .Results -}}
### {{ add $i 1 }}. {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}`
{{ $result.Rendered }}
diff --git a/server/events/templates/multi_project_header.tmpl b/server/events/templates/multi_project_header.tmpl
index c202c7e50c..c1ce5dc053 100644
--- a/server/events/templates/multi_project_header.tmpl
+++ b/server/events/templates/multi_project_header.tmpl
@@ -3,5 +3,9 @@ Ran {{.Command}} for {{ len .Results }} projects:
{{ range $result := .Results -}}
1. {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}`
+{{ end -}}
+{{ if (gt (len .Results) 0) -}}
+---
+
{{ end -}}
{{ end -}}
diff --git a/server/events/templates/multi_project_import.tmpl b/server/events/templates/multi_project_import.tmpl
index 22e4b4388d..31cd70cbd4 100644
--- a/server/events/templates/multi_project_import.tmpl
+++ b/server/events/templates/multi_project_import.tmpl
@@ -1,5 +1,5 @@
{{ define "multiProjectImport" -}}
-{{ template "multiProjectHeader" . }}
+{{ template "multiProjectHeader" . -}}
{{ range $i, $result := .Results -}}
### {{ add $i 1 }}. {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}`
{{ $result.Rendered }}
diff --git a/server/events/templates/multi_project_plan.tmpl b/server/events/templates/multi_project_plan.tmpl
index 9c3898ad48..f57e96794a 100644
--- a/server/events/templates/multi_project_plan.tmpl
+++ b/server/events/templates/multi_project_plan.tmpl
@@ -1,5 +1,5 @@
{{ define "multiProjectPlan" -}}
-{{ template "multiProjectHeader" . }}
+{{ template "multiProjectHeader" . -}}
{{ $disableApplyAll := .DisableApplyAll -}}
{{ $hideUnchangedPlans := .HideUnchangedPlanComments -}}
{{ range $i, $result := .Results -}}
diff --git a/server/events/templates/multi_project_plan_footer.tmpl b/server/events/templates/multi_project_plan_footer.tmpl
index 41683ab018..1c193a16b7 100644
--- a/server/events/templates/multi_project_plan_footer.tmpl
+++ b/server/events/templates/multi_project_plan_footer.tmpl
@@ -4,10 +4,14 @@
{{ len .Results }} projects, {{ .NumPlansWithChanges }} with changes, {{ .NumPlansWithNoChanges }} with no changes, {{ .NumPlanFailures }} failed
{{ if and (not .PlansDeleted) (ne .DisableApplyAll true) }}
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `{{ .ExecutableName }} apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `{{ .ExecutableName }} unlock`
+* :fast_forward: To **apply** all unapplied plans from this {{ .VcsRequestType }}, comment:
+ ```shell
+ {{ .ExecutableName }} apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this {{ .VcsRequestType }}, comment:
+ ```shell
+ {{ .ExecutableName }} unlock
+ ```
{{ end -}}
{{ end -}}
{{ end -}}
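
The footer now interpolates `.VcsRequestType` so the wording can match hosts that use different terminology (for example "merge request" instead of "pull request"). A hedged sketch of the substitution — field names follow the template above, the data values are made up:

```go
package main

import (
	"os"
	"text/template"
)

const footer = "To apply all unapplied plans from this {{ .VcsRequestType }}, " +
	"comment: {{ .ExecutableName }} apply\n"

func main() {
	t := template.Must(template.New("footer").Parse(footer))
	_ = t.Execute(os.Stdout, map[string]string{
		"ExecutableName": "atlantis",
		"VcsRequestType": "pull request", // e.g. "merge request" on other hosts
	})
}
```
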
diff --git a/server/events/templates/multi_project_policy.tmpl b/server/events/templates/multi_project_policy.tmpl
index c34c59f896..add574fde4 100644
--- a/server/events/templates/multi_project_policy.tmpl
+++ b/server/events/templates/multi_project_policy.tmpl
@@ -1,5 +1,5 @@
{{ define "multiProjectPolicy" -}}
-{{ template "multiProjectHeader" . }}
+{{ template "multiProjectHeader" . -}}
{{ $disableApplyAll := .DisableApplyAll -}}
{{ $hideUnchangedPlans := .HideUnchangedPlanComments -}}
{{ range $i, $result := .Results -}}
@@ -13,10 +13,14 @@
{{ end -}}
{{ if ne .DisableApplyAll true -}}
{{ if and (gt (len .Results) 0) (not .PlansDeleted) -}}
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `{{ .ExecutableName }} apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `{{ .ExecutableName }} unlock`
+* :fast_forward: To **apply** all unapplied plans from this {{ .VcsRequestType }}, comment:
+ ```shell
+ {{ .ExecutableName }} apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this {{ .VcsRequestType }}, comment:
+ ```shell
+ {{ .ExecutableName }} unlock
+ ```
{{ end -}}
{{ end -}}
{{ template "log" . -}}
diff --git a/server/events/templates/multi_project_policy_unsuccessful.tmpl b/server/events/templates/multi_project_policy_unsuccessful.tmpl
index a0a59fd994..039dd9ce7c 100644
--- a/server/events/templates/multi_project_policy_unsuccessful.tmpl
+++ b/server/events/templates/multi_project_policy_unsuccessful.tmpl
@@ -1,5 +1,5 @@
{{ define "multiProjectPolicyUnsuccessful" -}}
-{{ template "multiProjectHeader" . }}
+{{ template "multiProjectHeader" . -}}
{{ $disableApplyAll := .DisableApplyAll -}}
{{ range $i, $result := .Results -}}
### {{ add $i 1 }}. {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}`
@@ -11,12 +11,18 @@
{{ end -}}
{{ if ne .DisableApplyAll true -}}
{{ if and (gt (len .Results) 0) (not .PlansDeleted) -}}
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * `{{ .ExecutableName }} approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `{{ .ExecutableName }} unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this {{ .VcsRequestType }}, comment:
+ ```shell
+ {{ .ExecutableName }} approve_policies
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this {{ .VcsRequestType }}, comment:
+ ```shell
+ {{ .ExecutableName }} unlock
+ ```
* :repeat: To re-run policies **plan** this project again by commenting:
- * `{{ .ExecutableName }} plan`
+ ```shell
+ {{ .ExecutableName }} plan
+ ```
{{ end -}}
{{ end -}}
{{- template "log" . -}}
diff --git a/server/events/templates/multi_project_state_rm.tmpl b/server/events/templates/multi_project_state_rm.tmpl
index 90c0259dfe..a00464a7b8 100644
--- a/server/events/templates/multi_project_state_rm.tmpl
+++ b/server/events/templates/multi_project_state_rm.tmpl
@@ -1,5 +1,5 @@
{{ define "multiProjectStateRm" -}}
-{{ template "multiProjectHeader" . }}
+{{ template "multiProjectHeader" . -}}
{{ range $i, $result := .Results -}}
### {{ add $i 1 }}. {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}`
{{ $result.Rendered}}
diff --git a/server/events/templates/multi_project_version.tmpl b/server/events/templates/multi_project_version.tmpl
index 08266520e5..70eeea40f9 100644
--- a/server/events/templates/multi_project_version.tmpl
+++ b/server/events/templates/multi_project_version.tmpl
@@ -1,5 +1,5 @@
{{ define "multiProjectVersion" -}}
-{{ template "multiProjectHeader" . }}
+{{ template "multiProjectHeader" . -}}
{{ range $i, $result := .Results -}}
### {{ add $i 1 }}. {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}`
{{ $result.Rendered}}
diff --git a/server/events/templates/plan_success_unwrapped.tmpl b/server/events/templates/plan_success_unwrapped.tmpl
index 6bd81de233..e4ed2e0911 100644
--- a/server/events/templates/plan_success_unwrapped.tmpl
+++ b/server/events/templates/plan_success_unwrapped.tmpl
@@ -8,13 +8,17 @@ This plan was not saved because one or more projects failed and automerge requir
{{ else -}}
{{ if not .DisableApply -}}
* :arrow_forward: To **apply** this plan, comment:
- * `{{ .ApplyCmd }}`
+ ```shell
+ {{ .ApplyCmd }}
+ ```
{{ end -}}
{{ if not .DisableRepoLocking -}}
-* :put_litter_in_its_place: To **delete** this plan click [here]({{ .LockURL }})
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here]({{ .LockURL }})
{{ end -}}
* :repeat: To **plan** this project again, comment:
- * `{{ .RePlanCmd }}`
+ ```shell
+ {{ .RePlanCmd }}
+ ```
{{ end -}}
-{{ template "mergedAgain" . }}
+{{ template "mergedAgain" . -}}
{{ end -}}
diff --git a/server/events/templates/plan_success_wrapped.tmpl b/server/events/templates/plan_success_wrapped.tmpl
index cef96d0609..55c0d3042a 100644
--- a/server/events/templates/plan_success_wrapped.tmpl
+++ b/server/events/templates/plan_success_wrapped.tmpl
@@ -4,21 +4,25 @@
```diff
{{ if .EnableDiffMarkdownFormat }}{{ .DiffMarkdownFormattedTerraformOutput }}{{ else }}{{ .TerraformOutput }}{{ end }}
```
+
{{ if .PlanWasDeleted -}}
This plan was not saved because one or more projects failed and automerge requires all plans pass.
{{ else -}}
{{ if not .DisableApply -}}
* :arrow_forward: To **apply** this plan, comment:
- * `{{ .ApplyCmd }}`
+ ```shell
+ {{ .ApplyCmd }}
+ ```
{{ end -}}
{{ if not .DisableRepoLocking -}}
-* :put_litter_in_its_place: To **delete** this plan click [here]({{ .LockURL }})
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here]({{ .LockURL }})
{{ end -}}
* :repeat: To **plan** this project again, comment:
- * `{{ .RePlanCmd }}`
+ ```shell
+ {{ .RePlanCmd }}
+ ```
{{ end -}}
-
{{ .PlanSummary -}}
{{ template "mergedAgain" . -}}
{{ end -}}
diff --git a/server/events/templates/policy_check_results_unwrapped.tmpl b/server/events/templates/policy_check_results_unwrapped.tmpl
index 089e85660f..16d7b9e865 100644
--- a/server/events/templates/policy_check_results_unwrapped.tmpl
+++ b/server/events/templates/policy_check_results_unwrapped.tmpl
@@ -14,16 +14,22 @@
{{- end }}
{{- if .PolicyCleared }}
* :arrow_forward: To **apply** this plan, comment:
- * `{{ .ApplyCmd }}`
+ ```shell
+ {{ .ApplyCmd }}
+ ```
{{- else }}
#### Policy Approval Status:
```
{{ .PolicyApprovalSummary }}
```
* :heavy_check_mark: To **approve** this project, comment:
- * `{{ .ApprovePoliciesCmd }}`
+ ```shell
+ {{ .ApprovePoliciesCmd }}
+ ```
{{- end }}
-* :put_litter_in_its_place: To **delete** this plan click [here]({{ .LockURL }})
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here]({{ .LockURL }})
* :repeat: To re-run policies **plan** this project again by commenting:
- * `{{ .RePlanCmd }}`
+ ```shell
+ {{ .RePlanCmd }}
+ ```
{{ end -}}
diff --git a/server/events/templates/policy_check_results_wrapped.tmpl b/server/events/templates/policy_check_results_wrapped.tmpl
index bf03a6b1f1..330980c2f4 100644
--- a/server/events/templates/policy_check_results_wrapped.tmpl
+++ b/server/events/templates/policy_check_results_wrapped.tmpl
@@ -15,23 +15,32 @@
{{- end }}
{{- if .PolicyCleared }}
* :arrow_forward: To **apply** this plan, comment:
- * `{{ .ApplyCmd }}`
+ ```shell
+ {{ .ApplyCmd }}
+ ```
{{- else }}
+
#### Policy Approval Status:
```
{{ .PolicyApprovalSummary }}
```
* :heavy_check_mark: To **approve** this project, comment:
- * `{{ .ApprovePoliciesCmd }}`
+ ```shell
+ {{ .ApprovePoliciesCmd }}
+ ```
{{- end }}
-* :put_litter_in_its_place: To **delete** this plan click [here]({{ .LockURL }})
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here]({{ .LockURL }})
* :repeat: To re-run policies **plan** this project again by commenting:
- * `{{ .RePlanCmd }}`
-
+ ```shell
+ {{ .RePlanCmd }}
+ ```
{{- if eq .Command "Policy Check" }}
+{{- if ne .PolicyCheckSummary "" }}
```
{{ .PolicyCheckSummary }}
```
{{- end }}
-{{ end -}}
\ No newline at end of file
+
+{{- end }}
+{{ end -}}
diff --git a/server/events/templates/single_project_plan_success.tmpl b/server/events/templates/single_project_plan_success.tmpl
index afbe3d5701..77f6e13d64 100644
--- a/server/events/templates/single_project_plan_success.tmpl
+++ b/server/events/templates/single_project_plan_success.tmpl
@@ -5,10 +5,14 @@ Ran {{ .Command }} for {{ if $result.ProjectName }}project: `{{ $result.ProjectN
{{ $result.Rendered }}
{{ if ne .DisableApplyAll true }}
---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
- * `{{ .ExecutableName }} apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `{{ .ExecutableName }} unlock`
+* :fast_forward: To **apply** all unapplied plans from this {{ .VcsRequestType }}, comment:
+ ```shell
+ {{ .ExecutableName }} apply
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this {{ .VcsRequestType }}, comment:
+ ```shell
+ {{ .ExecutableName }} unlock
+ ```
{{ end -}}
{{ template "log" . -}}
{{ end -}}
diff --git a/server/events/templates/single_project_policy_unsuccessful.tmpl b/server/events/templates/single_project_policy_unsuccessful.tmpl
index 0760406814..0bf0ac1a0c 100644
--- a/server/events/templates/single_project_policy_unsuccessful.tmpl
+++ b/server/events/templates/single_project_policy_unsuccessful.tmpl
@@ -3,14 +3,20 @@
Ran {{ .Command }} for {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}`
{{ $result.Rendered }}
-{{ if ne .DisableApplyAll true }}
+{{ if ne .DisableApplyAll true -}}
---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
- * `{{ .ExecutableName }} approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
- * `{{ .ExecutableName }} unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this {{ .VcsRequestType }}, comment:
+ ```shell
+ {{ .ExecutableName }} approve_policies
+ ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this {{ .VcsRequestType }}, comment:
+ ```shell
+ {{ .ExecutableName }} unlock
+ ```
* :repeat: To re-run policies **plan** this project again by commenting:
- * `{{ .ExecutableName }} plan`
+ ```shell
+ {{ .ExecutableName }} plan
+ ```
{{ end -}}
{{- template "log" . -}}
{{ end -}}
diff --git a/server/events/templates/state_rm_success_unwrapped.tmpl b/server/events/templates/state_rm_success_unwrapped.tmpl
index c0f24323a5..564d8796ae 100644
--- a/server/events/templates/state_rm_success_unwrapped.tmpl
+++ b/server/events/templates/state_rm_success_unwrapped.tmpl
@@ -6,5 +6,7 @@
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `{{.RePlanCmd}}`
+ ```shell
+ {{.RePlanCmd}}
+ ```
{{ end }}
diff --git a/server/events/templates/state_rm_success_wrapped.tmpl b/server/events/templates/state_rm_success_wrapped.tmpl
index f182c85bc1..2a703107c6 100644
--- a/server/events/templates/state_rm_success_wrapped.tmpl
+++ b/server/events/templates/state_rm_success_wrapped.tmpl
@@ -8,5 +8,7 @@
:put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
* :repeat: To **plan** this project again, comment:
- * `{{.RePlanCmd}}`
+ ```shell
+ {{.RePlanCmd}}
+ ```
{{ end }}
diff --git a/server/events/unlock_command_runner.go b/server/events/unlock_command_runner.go
index 470fe26118..af360adf83 100644
--- a/server/events/unlock_command_runner.go
+++ b/server/events/unlock_command_runner.go
@@ -56,7 +56,7 @@ func (u *UnlockCommandRunner) Run(ctx *command.Context, _ *CommentCommand) {
var numLocks int
if err == nil && !hasLabel {
- numLocks, err = u.deleteLockCommand.DeleteLocksByPull(baseRepo.FullName, pullNum)
+ numLocks, err = u.deleteLockCommand.DeleteLocksByPull(ctx.Log, baseRepo.FullName, pullNum)
if err != nil {
vcsMessage = "Failed to delete PR locks"
ctx.Log.Err("failed to delete locks by pull %s", err.Error())
diff --git a/server/events/vcs/gitea/client.go b/server/events/vcs/gitea/client.go
new file mode 100644
index 0000000000..f9deb2cb74
--- /dev/null
+++ b/server/events/vcs/gitea/client.go
@@ -0,0 +1,517 @@
+// Copyright 2024 Martijn van der Kleijn & Florian Beisel
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gitea
+
+import (
+ "context"
+ "encoding/base64"
+ "fmt"
+ "strings"
+ "time"
+
+ "code.gitea.io/sdk/gitea"
+ "github.com/pkg/errors"
+ "github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/logging"
+)
+
+// Emergency break for Gitea pagination (just in case).
+// Set to 500 to prevent runaway situations; the value is intentionally
+// high but otherwise arbitrary.
+const giteaPaginationEBreak = 500
+
+type GiteaClient struct {
+ giteaClient *gitea.Client
+ username string
+ token string
+ pageSize int
+ ctx context.Context
+}
+
+type GiteaPRReviewSummary struct {
+ Reviews []GiteaReview
+}
+
+type GiteaReview struct {
+ ID int64
+ Body string
+ Reviewer string
+ State gitea.ReviewStateType // e.g., "APPROVED", "PENDING", "REQUEST_CHANGES"
+ SubmittedAt time.Time
+}
+
+type GiteaPullGetter interface {
+ GetPullRequest(repo models.Repo, pullNum int) (*gitea.PullRequest, error)
+}
+
+// NewClient builds a client that makes API calls to Gitea. baseURL is the
+// base URL of the Gitea instance, ex. https://gitea.corp.com; don't include
+// the API path or version. token is used to authenticate API requests and
+// pagesize sets the page size for paginated list calls.
+func NewClient(baseURL string, username string, token string, pagesize int, logger logging.SimpleLogging) (*GiteaClient, error) {
+ logger.Debug("Creating new Gitea client for: %s", baseURL)
+
+ giteaClient, err := gitea.NewClient(baseURL,
+ gitea.SetToken(token),
+ gitea.SetUserAgent("atlantis"),
+ )
+
+ if err != nil {
+ return nil, errors.Wrap(err, "creating gitea client")
+ }
+
+ return &GiteaClient{
+ giteaClient: giteaClient,
+ username: username,
+ token: token,
+ pageSize: pagesize,
+ ctx: context.Background(),
+ }, nil
+}
+
+func (c *GiteaClient) GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) (*gitea.PullRequest, error) {
+ logger.Debug("Getting Gitea pull request %d", pullNum)
+
+ pr, resp, err := c.giteaClient.GetPullRequest(repo.Owner, repo.Name, int64(pullNum))
+
+ if err != nil {
+ logger.Debug("GET /repos/%v/%v/pulls/%d returned: %v", repo.Owner, repo.Name, pullNum, resp.StatusCode)
+ return nil, err
+ }
+
+ return pr, nil
+}
+
+// GetModifiedFiles returns the names of files that were modified in the pull request
+// relative to the repo root, e.g. parent/child/file.txt.
+func (c *GiteaClient) GetModifiedFiles(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) {
+ logger.Debug("Getting modified files for Gitea pull request %d", pull.Num)
+
+ changedFiles := make([]string, 0)
+ page := 0
+ nextPage := 1
+ listOptions := gitea.ListPullRequestFilesOptions{
+ ListOptions: gitea.ListOptions{
+ Page: 1,
+ PageSize: c.pageSize,
+ },
+ }
+
+ for page < nextPage {
+ page += 1
+ listOptions.ListOptions.Page = page
+ files, resp, err := c.giteaClient.ListPullRequestFiles(repo.Owner, repo.Name, int64(pull.Num), listOptions)
+ if err != nil {
+ logger.Debug("[page %d] GET /repos/%v/%v/pulls/%d/files returned: %v", page, repo.Owner, repo.Name, pull.Num, resp.StatusCode)
+ return nil, err
+ }
+
+ for _, file := range files {
+ changedFiles = append(changedFiles, file.Filename)
+ }
+
+ nextPage = resp.NextPage
+
+ // Emergency break after giteaPaginationEBreak pages
+ if page >= giteaPaginationEBreak {
+ break
+ }
+ }
+
+ return changedFiles, nil
+}
+
+// CreateComment creates a comment on the pull request. As far as we're aware, Gitea currently has no built-in maximum comment length.
+func (c *GiteaClient) CreateComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, comment string, command string) error {
+ logger.Debug("Creating comment on Gitea pull request %d", pullNum)
+
+ opt := gitea.CreateIssueCommentOption{
+ Body: comment,
+ }
+
+ _, resp, err := c.giteaClient.CreateIssueComment(repo.Owner, repo.Name, int64(pullNum), opt)
+
+ if err != nil {
+ logger.Debug("POST /repos/%v/%v/issues/%d/comments returned: %v", repo.Owner, repo.Name, pullNum, resp.StatusCode)
+ return err
+ }
+
+ logger.Debug("Added comment to Gitea pull request %d: %s", pullNum, comment)
+
+ return nil
+}
+
+// ReactToComment adds a reaction to a comment.
+func (c *GiteaClient) ReactToComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, commentID int64, reaction string) error {
+ logger.Debug("Adding reaction to Gitea pull request comment %d", commentID)
+
+ _, resp, err := c.giteaClient.PostIssueCommentReaction(repo.Owner, repo.Name, commentID, reaction)
+
+ if err != nil {
+ logger.Debug("POST /repos/%v/%v/issues/comments/%d/reactions returned: %v", repo.Owner, repo.Name, commentID, resp.StatusCode)
+ return err
+ }
+
+ return nil
+}
+
+// HidePrevCommandComments hides the previous command comments from the pull
+// request.
+func (c *GiteaClient) HidePrevCommandComments(logger logging.SimpleLogging, repo models.Repo, pullNum int, command string, dir string) error {
+ logger.Debug("Hiding previous command comments on Gitea pull request %d", pullNum)
+
+ var allComments []*gitea.Comment
+
+ nextPage := 1
+ for {
+ // Initialize ListIssueCommentOptions with the current page
+ opts := gitea.ListIssueCommentOptions{
+ ListOptions: gitea.ListOptions{
+ Page: nextPage,
+ PageSize: c.pageSize,
+ },
+ }
+
+ comments, resp, err := c.giteaClient.ListIssueComments(repo.Owner, repo.Name, int64(pullNum), opts)
+ if err != nil {
+ logger.Debug("GET /repos/%v/%v/issues/%d/comments returned: %v", repo.Owner, repo.Name, pullNum, resp.StatusCode)
+ return err
+ }
+
+ allComments = append(allComments, comments...)
+
+ // Break the loop if there are no more pages to fetch
+ if resp.NextPage == 0 {
+ break
+ }
+ nextPage = resp.NextPage
+ }
+
+ currentUser, resp, err := c.giteaClient.GetMyUserInfo()
+ if err != nil {
+ logger.Debug("GET /user returned: %v", resp.StatusCode)
+ return err
+ }
+
+ summaryHeader := fmt.Sprintf("<details><summary>Superseded Atlantis %s</summary>", command)
+ summaryFooter := "</details>"
+ lineFeed := "\n"
+
+ for _, comment := range allComments {
+ if comment.Poster == nil || comment.Poster.UserName != currentUser.UserName {
+ continue
+ }
+
+ body := strings.Split(comment.Body, "\n")
+ if len(body) == 0 || (!strings.Contains(strings.ToLower(body[0]), strings.ToLower(command)) && dir != "" && !strings.Contains(strings.ToLower(body[0]), strings.ToLower(dir))) {
+ continue
+ }
+
+ supersededComment := summaryHeader + lineFeed + comment.Body + lineFeed + summaryFooter + lineFeed
+
+ logger.Debug("Hiding comment %s", comment.ID)
+ _, _, err := c.giteaClient.EditIssueComment(repo.Owner, repo.Name, comment.ID, gitea.EditIssueCommentOption{
+ Body: supersededComment,
+ })
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// PullIsApproved returns ApprovalStatus with IsApproved set to true if the pull request has a review that approved the PR.
+func (c *GiteaClient) PullIsApproved(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) {
+ logger.Debug("Checking if Gitea pull request %d is approved", pull.Num)
+
+ page := 0
+ nextPage := 1
+
+ approvalStatus := models.ApprovalStatus{
+ IsApproved: false,
+ }
+
+ listOptions := gitea.ListPullReviewsOptions{
+ ListOptions: gitea.ListOptions{
+ Page: 1,
+ PageSize: c.pageSize,
+ },
+ }
+
+ for page < nextPage {
+ page += 1
+ listOptions.ListOptions.Page = page
+ pullReviews, resp, err := c.giteaClient.ListPullReviews(repo.Owner, repo.Name, int64(pull.Num), listOptions)
+
+ if err != nil {
+ logger.Debug("GET /repos/%v/%v/pulls/%d/reviews returned: %v", repo.Owner, repo.Name, pull.Num, resp.StatusCode)
+ return approvalStatus, err
+ }
+
+ for _, review := range pullReviews {
+ if review.State == gitea.ReviewStateApproved {
+ approvalStatus.IsApproved = true
+ approvalStatus.ApprovedBy = review.Reviewer.UserName
+ approvalStatus.Date = review.Submitted
+
+ return approvalStatus, nil
+ }
+ }
+
+ nextPage = resp.NextPage
+
+ // Emergency break after giteaPaginationEBreak pages
+ if page >= giteaPaginationEBreak {
+ break
+ }
+ }
+
+ return approvalStatus, nil
+}
+
+// PullIsMergeable returns true if the pull request is mergeable
+func (c *GiteaClient) PullIsMergeable(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, vcsstatusname string) (bool, error) {
+ logger.Debug("Checking if Gitea pull request %d is mergeable", pull.Num)
+
+ pullRequest, _, err := c.giteaClient.GetPullRequest(repo.Owner, repo.Name, int64(pull.Num))
+
+ if err != nil {
+ return false, err
+ }
+
+ logger.Debug("Gitea pull request is mergeable: %v (%v)", pullRequest.Mergeable, pull.Num)
+
+ return pullRequest.Mergeable, nil
+}
+
+// UpdateStatus updates the commit status to state for pull. src is the
+// source of this status. This should be relatively static across runs,
+// ex. atlantis/plan or atlantis/apply.
+// description is a description of this particular status update and can
+// change across runs.
+// url is an optional link that users should click on for more information
+// about this status.
+func (c *GiteaClient) UpdateStatus(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error {
+ giteaState := gitea.StatusFailure
+
+ switch state {
+ case models.PendingCommitStatus:
+ giteaState = gitea.StatusPending
+ case models.SuccessCommitStatus:
+ giteaState = gitea.StatusSuccess
+ case models.FailedCommitStatus:
+ giteaState = gitea.StatusFailure
+ }
+
+ logger.Debug("Updating status on Gitea pull request %d for '%s' to '%s'", pull.Num, description, state)
+
+ newStatusOption := gitea.CreateStatusOption{
+ State: giteaState,
+ TargetURL: url,
+ Description: description,
+ }
+
+ _, resp, err := c.giteaClient.CreateStatus(repo.Owner, repo.Name, pull.HeadCommit, newStatusOption)
+
+ if err != nil {
+ logger.Debug("POST /repos/%v/%v/statuses/%s returned: %v", repo.Owner, repo.Name, pull.HeadCommit, resp.StatusCode)
+ return err
+ }
+
+ logger.Debug("Gitea status for pull request updated: %v (%v)", state, pull.Num)
+
+ return nil
+}
+
+// DiscardReviews dismisses all pull request reviews
+func (c *GiteaClient) DiscardReviews(repo models.Repo, pull models.PullRequest) error {
+ page := 0
+ nextPage := 1
+
+ dismissOptions := gitea.DismissPullReviewOptions{
+ Message: "Dismissed by Atlantis",
+ }
+
+ listOptions := gitea.ListPullReviewsOptions{
+ ListOptions: gitea.ListOptions{
+ Page: 1,
+ PageSize: c.pageSize,
+ },
+ }
+
+ for page < nextPage {
+ page += 1
+ listOptions.ListOptions.Page = page
+ pullReviews, resp, err := c.giteaClient.ListPullReviews(repo.Owner, repo.Name, int64(pull.Num), listOptions)
+
+ if err != nil {
+ return err
+ }
+
+ for _, review := range pullReviews {
+ _, err := c.giteaClient.DismissPullReview(repo.Owner, repo.Name, int64(pull.Num), review.ID, dismissOptions)
+
+ if err != nil {
+ return err
+ }
+ }
+
+ nextPage = resp.NextPage
+
+ // Emergency break after giteaPaginationEBreak pages
+ if page >= giteaPaginationEBreak {
+ break
+ }
+ }
+
+ return nil
+}
+
+func (c *GiteaClient) MergePull(logger logging.SimpleLogging, pull models.PullRequest, pullOptions models.PullRequestOptions) error {
+ logger.Debug("Merging Gitea pull request %d", pull.Num)
+
+ mergeOptions := gitea.MergePullRequestOption{
+ Style: gitea.MergeStyleMerge,
+ Title: "Atlantis merge",
+ Message: "Automatic merge by Atlantis",
+ DeleteBranchAfterMerge: pullOptions.DeleteSourceBranchOnMerge,
+ ForceMerge: false,
+ HeadCommitId: pull.HeadCommit,
+ MergeWhenChecksSucceed: false,
+ }
+
+ succeeded, resp, err := c.giteaClient.MergePullRequest(pull.BaseRepo.Owner, pull.BaseRepo.Name, int64(pull.Num), mergeOptions)
+
+ if err != nil {
+ logger.Debug("POST /repos/%v/%v/pulls/%d/merge returned: %v", pull.BaseRepo.Owner, pull.BaseRepo.Name, pull.Num, resp.StatusCode)
+ return err
+ }
+
+ if !succeeded {
+ return fmt.Errorf("merge failed: %s", resp.Status)
+ }
+
+ return nil
+}
+
+// MarkdownPullLink specifies the string used in a pull request comment to reference another pull request.
+func (c *GiteaClient) MarkdownPullLink(pull models.PullRequest) (string, error) {
+ return fmt.Sprintf("#%d", pull.Num), nil
+}
+
+// GetTeamNamesForUser returns the names of the teams or groups that the user belongs to (in the organization the repository belongs to).
+func (c *GiteaClient) GetTeamNamesForUser(repo models.Repo, user models.User) ([]string, error) {
+ // TODO: implement
+ return nil, errors.New("GetTeamNamesForUser not (yet) implemented for Gitea client")
+}
+
+// GetFileContent fetches the content of a single file from the VCS (for hosts
+// that support fetching a single file from a repository). The first return
+// value indicates whether the repo contains the file; if it does, the file's
+// content is placed in the second return value.
+func (c *GiteaClient) GetFileContent(logger logging.SimpleLogging, pull models.PullRequest, fileName string) (bool, []byte, error) {
+ logger.Debug("Getting file content for %s in Gitea pull request %d", fileName, pull.Num)
+
+ content, resp, err := c.giteaClient.GetContents(pull.BaseRepo.Owner, pull.BaseRepo.Name, pull.HeadCommit, fileName)
+
+ if err != nil {
+ logger.Debug("GET /repos/%v/%v/contents/%s?ref=%v returned: %v", pull.BaseRepo.Owner, pull.BaseRepo.Name, fileName, pull.HeadCommit, resp.StatusCode)
+ return false, nil, err
+ }
+
+ if content.Type == "file" {
+ decodedData, err := base64.StdEncoding.DecodeString(*content.Content)
+ if err != nil {
+ return true, []byte{}, err
+ }
+ return true, decodedData, nil
+ }
+
+ return false, nil, nil
+}
+
+// SupportsSingleFileDownload returns true if the VCS supports downloading a single file
+func (c *GiteaClient) SupportsSingleFileDownload(repo models.Repo) bool {
+ return true
+}
+
+// GetCloneURL returns the clone URL of the repo
+func (c *GiteaClient) GetCloneURL(logger logging.SimpleLogging, _ models.VCSHostType, repo string) (string, error) {
+ logger.Debug("Getting clone URL for %s", repo)
+
+ parts := strings.Split(repo, "/")
+ if len(parts) < 2 {
+ return "", errors.New("invalid repo format, expected 'owner/repo'")
+ }
+ repository, _, err := c.giteaClient.GetRepo(parts[0], parts[1])
+ if err != nil {
+ logger.Debug("GET /repos/%v/%v returned an error: %v", parts[0], parts[1], err)
+ return "", err
+ }
+ return repository.CloneURL, nil
+}
+
+// GetPullLabels returns the labels of a pull request
+func (c *GiteaClient) GetPullLabels(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) {
+ logger.Debug("Getting labels for Gitea pull request %d", pull.Num)
+
+ page := 0
+ nextPage := 1
+ results := make([]string, 0)
+
+ opts := gitea.ListLabelsOptions{
+ ListOptions: gitea.ListOptions{
+ Page: 0,
+ PageSize: c.pageSize,
+ },
+ }
+
+ for page < nextPage {
+ page += 1
+ opts.ListOptions.Page = page
+
+ labels, resp, err := c.giteaClient.GetIssueLabels(repo.Owner, repo.Name, int64(pull.Num), opts)
+
+ if err != nil {
+ logger.Debug("GET /repos/%v/%v/issues/%d/labels?%v returned: %v", repo.Owner, repo.Name, pull.Num, "unknown", resp.StatusCode)
+ return nil, err
+ }
+
+ for _, label := range labels {
+ results = append(results, label.Name)
+ }
+
+ nextPage = resp.NextPage
+
+ // Emergency break after giteaPaginationEBreak pages
+ if page >= giteaPaginationEBreak {
+ break
+ }
+ }
+
+ return results, nil
+}
+
+func ValidateSignature(payload []byte, signature string, secretKey []byte) error {
+ isValid, err := gitea.VerifyWebhookSignature(string(secretKey), signature, payload)
+ if err != nil {
+ return errors.New("signature verification internal error")
+ }
+ if !isValid {
+ return errors.New("invalid signature")
+ }
+
+ return nil
+}
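
The list endpoints in this client all share the same manual pagination loop, bounded by giteaPaginationEBreak. A standalone sketch of that loop with a fake fetch function (names are illustrative, not the Gitea SDK's):

```go
package main

import "fmt"

type response struct{ NextPage int } // stands in for *gitea.Response

// fetch pretends to be a paginated SDK list call with exactly three pages.
func fetch(page int) ([]string, response) {
	next := page + 1
	if page >= 3 {
		next = 0 // the SDK reports 0 when there are no more pages
	}
	return []string{fmt.Sprintf("item-%d", page)}, response{NextPage: next}
}

func main() {
	const paginationBreak = 500 // emergency break, as above
	var items []string
	page, nextPage := 0, 1
	for page < nextPage {
		page += 1 // note: "page = +1" would pin the loop to page 1
		batch, resp := fetch(page)
		items = append(items, batch...)
		nextPage = resp.NextPage
		if page >= paginationBreak {
			break
		}
	}
	fmt.Println(items) // [item-1 item-2 item-3]
}
```
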
diff --git a/server/events/vcs/gitea/models.go b/server/events/vcs/gitea/models.go
new file mode 100644
index 0000000000..e624578e24
--- /dev/null
+++ b/server/events/vcs/gitea/models.go
@@ -0,0 +1,30 @@
+// Copyright 2024 Florian Beisel
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gitea
+
+import "code.gitea.io/sdk/gitea"
+
+type GiteaWebhookPayload struct {
+ Action string `json:"action"`
+ Number int `json:"number"`
+ PullRequest gitea.PullRequest `json:"pull_request"`
+}
+
+type GiteaIssueCommentPayload struct {
+ Action string `json:"action"`
+ Comment gitea.Comment `json:"comment"`
+ Repository gitea.Repository `json:"repository"`
+ Issue gitea.Issue `json:"issue"`
+}
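
These payload structs are what the webhook handler decodes Gitea's JSON bodies into. A minimal decoding sketch, with a trimmed field set and a hypothetical request body:

```go
package main

import (
	"encoding/json"
	"fmt"
)

type webhookPayload struct {
	Action string `json:"action"`
	Number int    `json:"number"`
}

func main() {
	body := []byte(`{"action":"opened","number":42}`)
	var p webhookPayload
	if err := json.Unmarshal(body, &p); err != nil {
		panic(err)
	}
	fmt.Printf("pull request #%d was %s\n", p.Number, p.Action)
}
```
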
diff --git a/server/events/vcs/github_client.go b/server/events/vcs/github_client.go
index 3ceef6c5e7..f84c2a27fd 100644
--- a/server/events/vcs/github_client.go
+++ b/server/events/vcs/github_client.go
@@ -76,7 +76,7 @@ type GithubPRReviewSummary struct {
}
// NewGithubClient returns a valid GitHub client.
-func NewGithubClient(hostname string, credentials GithubCredentials, config GithubConfig, logger logging.SimpleLogging) (*GithubClient, error) { //nolint:staticcheck
+func NewGithubClient(hostname string, credentials GithubCredentials, config GithubConfig, logger logging.SimpleLogging) (*GithubClient, error) {
logger.Debug("Creating new GitHub client for host: %s", hostname)
transport, err := credentials.Client()
if err != nil {
diff --git a/server/events/vcs/proxy.go b/server/events/vcs/proxy.go
index d3d60b03fb..cd67b84c90 100644
--- a/server/events/vcs/proxy.go
+++ b/server/events/vcs/proxy.go
@@ -26,7 +26,7 @@ type ClientProxy struct {
clients map[models.VCSHostType]Client
}
-func NewClientProxy(githubClient Client, gitlabClient Client, bitbucketCloudClient Client, bitbucketServerClient Client, azuredevopsClient Client) *ClientProxy {
+func NewClientProxy(githubClient Client, gitlabClient Client, bitbucketCloudClient Client, bitbucketServerClient Client, azuredevopsClient Client, giteaClient Client) *ClientProxy {
if githubClient == nil {
githubClient = &NotConfiguredVCSClient{}
}
@@ -42,6 +42,9 @@ func NewClientProxy(githubClient Client, gitlabClient Client, bitbucketCloudClie
if azuredevopsClient == nil {
azuredevopsClient = &NotConfiguredVCSClient{}
}
+ if giteaClient == nil {
+ giteaClient = &NotConfiguredVCSClient{}
+ }
return &ClientProxy{
clients: map[models.VCSHostType]Client{
models.Github: githubClient,
@@ -49,6 +52,7 @@ func NewClientProxy(githubClient Client, gitlabClient Client, bitbucketCloudClie
models.BitbucketCloud: bitbucketCloudClient,
models.BitbucketServer: bitbucketServerClient,
models.AzureDevops: azuredevopsClient,
+ models.Gitea: giteaClient,
},
}
}
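
NewClientProxy defaults any missing client to NotConfiguredVCSClient, so a request for an unconfigured host fails with a clear error rather than a nil-pointer panic. The shape of that pattern in isolation (illustrative types, not the Atlantis ones):

```go
package main

import (
	"errors"
	"fmt"
)

type Client interface {
	CreateComment(body string) error
}

type notConfigured struct{}

func (notConfigured) CreateComment(string) error {
	return errors.New("atlantis was not configured to support this VCS host")
}

func newProxy(gitea Client) map[string]Client {
	if gitea == nil {
		gitea = notConfigured{} // substitute a stub rather than storing nil
	}
	return map[string]Client{"gitea": gitea}
}

func main() {
	proxy := newProxy(nil)
	fmt.Println(proxy["gitea"].CreateComment("hello")) // clear error, no panic
}
```
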
diff --git a/server/events/working_dir.go b/server/events/working_dir.go
index 886b3c4b40..c3ebe56e80 100644
--- a/server/events/working_dir.go
+++ b/server/events/working_dir.go
@@ -41,23 +41,23 @@ type WorkingDir interface {
// absolute path to the root of the cloned repo. It also returns
// a boolean indicating if we should warn users that the branch we're
// merging into has been updated since we cloned it.
- Clone(headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error)
+ Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error)
// GetWorkingDir returns the path to the workspace for this repo and pull.
// If workspace does not exist on disk, error will be of type os.IsNotExist.
GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) (string, error)
- HasDiverged(cloneDir string) bool
+ HasDiverged(logger logging.SimpleLogging, cloneDir string) bool
GetPullDir(r models.Repo, p models.PullRequest) (string, error)
// Delete deletes the workspace for this repo and pull.
- Delete(r models.Repo, p models.PullRequest) error
- DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error
+ Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error
+ DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) error
// Set a flag in the workingdir so Clone() can know that it is safe to re-clone the workingdir if
// the upstream branch has been modified. This is only safe after grabbing the project lock
// and before running any plans
SetCheckForUpstreamChanges()
// DeletePlan deletes the plan for this repo, pull, workspace path and project name
- DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error
+ DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error
// GetGitUntrackedFiles returns a list of Git untracked files in the working dir.
- GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) ([]string, error)
+ GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) ([]string, error)
}
// FileWorkspace implements WorkingDir with the file system.
@@ -86,7 +86,6 @@ type FileWorkspace struct {
GpgNoSigningEnabled bool
// flag indicating if we have to merge with potential new changes upstream (directly after grabbing project lock)
CheckForUpstreamChanges bool
- Logger logging.SimpleLogging
}
// Clone git clones headRepo, checks out the branch and then returns the absolute
@@ -95,10 +94,7 @@ type FileWorkspace struct {
// If the repo already exists and is at
// the right commit it does nothing. This is to support running commands in
// multiple dirs of the same repo without deleting existing plans.
-func (w *FileWorkspace) Clone(
- headRepo models.Repo,
- p models.PullRequest,
- workspace string) (string, bool, error) {
+func (w *FileWorkspace) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) {
cloneDir := w.cloneDir(p.BaseRepo, p, workspace)
defer func() { w.CheckForUpstreamChanges = false }()
@@ -106,7 +102,7 @@ func (w *FileWorkspace) Clone(
// If the directory already exists, check if it's at the right commit.
// If so, then we do nothing.
if _, err := os.Stat(cloneDir); err == nil {
- w.Logger.Debug("clone directory %q already exists, checking if it's at the right commit", cloneDir)
+ logger.Debug("clone directory '%s' already exists, checking if it's at the right commit", cloneDir)
// We use git rev-parse to see if our repo is at the right commit.
// If just checking out the pull request branch, we can use HEAD.
@@ -121,28 +117,28 @@ func (w *FileWorkspace) Clone(
revParseCmd.Dir = cloneDir
outputRevParseCmd, err := revParseCmd.CombinedOutput()
if err != nil {
- w.Logger.Warn("will re-clone repo, could not determine if was at correct commit: %s: %s: %s", strings.Join(revParseCmd.Args, " "), err, string(outputRevParseCmd))
- return cloneDir, false, w.forceClone(c)
+ logger.Warn("will re-clone repo, could not determine if was at correct commit: %s: %s: %s", strings.Join(revParseCmd.Args, " "), err, string(outputRevParseCmd))
+ return cloneDir, false, w.forceClone(logger, c)
}
currCommit := strings.Trim(string(outputRevParseCmd), "\n")
// We're prefix matching here because BitBucket doesn't give us the full
// commit, only a 12 character prefix.
if strings.HasPrefix(currCommit, p.HeadCommit) {
- if w.CheckForUpstreamChanges && w.CheckoutMerge && w.recheckDiverged(p, headRepo, cloneDir) {
- w.Logger.Info("base branch has been updated, using merge strategy and will clone again")
- return cloneDir, true, w.mergeAgain(c)
+ if w.CheckForUpstreamChanges && w.CheckoutMerge && w.recheckDiverged(logger, p, headRepo, cloneDir) {
+ logger.Info("base branch has been updated, using merge strategy and will clone again")
+ return cloneDir, true, w.mergeAgain(logger, c)
}
- w.Logger.Debug("repo is at correct commit %q so will not re-clone", p.HeadCommit)
+ logger.Debug("repo is at correct commit '%s' so will not re-clone", p.HeadCommit)
return cloneDir, false, nil
} else {
- w.Logger.Debug("repo was already cloned but is not at correct commit, wanted %q got %q", p.HeadCommit, currCommit)
+ logger.Debug("repo was already cloned but is not at correct commit, wanted '%s' got '%s'", p.HeadCommit, currCommit)
}
// We'll fall through to re-clone.
}
// Otherwise we clone the repo.
- return cloneDir, false, w.forceClone(c)
+ return cloneDir, false, w.forceClone(logger, c)
}
// recheckDiverged returns true if the branch we're merging into has diverged
@@ -152,7 +148,7 @@ func (w *FileWorkspace) Clone(
// and we have to perform a new merge.
// If there are any errors we return false since we prefer things to succeed
// vs. stopping the plan/apply.
-func (w *FileWorkspace) recheckDiverged(p models.PullRequest, headRepo models.Repo, cloneDir string) bool {
+func (w *FileWorkspace) recheckDiverged(logger logging.SimpleLogging, p models.PullRequest, headRepo models.Repo, cloneDir string) bool {
if !w.CheckoutMerge {
// It only makes sense to warn that main has diverged if we're using
// the checkout merge strategy. If we're just checking out the branch,
@@ -185,15 +181,15 @@ func (w *FileWorkspace) recheckDiverged(p models.PullRequest, headRepo models.Re
output, err := cmd.CombinedOutput()
if err != nil {
- w.Logger.Warn("getting remote update failed: %s", string(output))
+ logger.Warn("getting remote update failed: %s", string(output))
return false
}
}
- return w.HasDiverged(cloneDir)
+ return w.HasDiverged(logger, cloneDir)
}
-func (w *FileWorkspace) HasDiverged(cloneDir string) bool {
+func (w *FileWorkspace) HasDiverged(logger logging.SimpleLogging, cloneDir string) bool {
if !w.CheckoutMerge {
// Both the diverged warning and the UnDiverged apply requirement only apply to merge checkout strategy so
// we assume false here for 'branch' strategy.
@@ -204,7 +200,7 @@ func (w *FileWorkspace) HasDiverged(cloneDir string) bool {
statusFetchCmd.Dir = cloneDir
outputStatusFetch, err := statusFetchCmd.CombinedOutput()
if err != nil {
- w.Logger.Warn("fetching repo has failed: %s", string(outputStatusFetch))
+ logger.Warn("fetching repo has failed: %s", string(outputStatusFetch))
return false
}
@@ -213,14 +209,14 @@ func (w *FileWorkspace) HasDiverged(cloneDir string) bool {
statusUnoCmd.Dir = cloneDir
outputStatusUno, err := statusUnoCmd.CombinedOutput()
if err != nil {
- w.Logger.Warn("getting repo status has failed: %s", string(outputStatusUno))
+ logger.Warn("getting repo status has failed: %s", string(outputStatusUno))
return false
}
hasDiverged := strings.Contains(string(outputStatusUno), "have diverged")
return hasDiverged
}
-func (w *FileWorkspace) forceClone(c wrappedGitContext) error {
+func (w *FileWorkspace) forceClone(logger logging.SimpleLogging, c wrappedGitContext) error {
value, _ := cloneLocks.LoadOrStore(c.dir, new(sync.Mutex))
mutex := value.(*sync.Mutex)
@@ -232,11 +228,11 @@ func (w *FileWorkspace) forceClone(c wrappedGitContext) error {
err := os.RemoveAll(c.dir)
if err != nil {
- return errors.Wrapf(err, "deleting dir %q before cloning", c.dir)
+ return errors.Wrapf(err, "deleting dir '%s' before cloning", c.dir)
}
// Create the directory and parents if necessary.
- w.Logger.Info("creating dir %q", c.dir)
+ logger.Info("creating dir '%s'", c.dir)
if err := os.MkdirAll(c.dir, 0700); err != nil {
return errors.Wrap(err, "creating new workspace")
}
@@ -253,37 +249,37 @@ func (w *FileWorkspace) forceClone(c wrappedGitContext) error {
// if branch strategy, use depth=1
if !w.CheckoutMerge {
- return w.wrappedGit(c, "clone", "--depth=1", "--branch", c.pr.HeadBranch, "--single-branch", headCloneURL, c.dir)
+ return w.wrappedGit(logger, c, "clone", "--depth=1", "--branch", c.pr.HeadBranch, "--single-branch", headCloneURL, c.dir)
}
// if merge strategy...
// if no checkout depth, omit depth arg
if w.CheckoutDepth == 0 {
- if err := w.wrappedGit(c, "clone", "--branch", c.pr.BaseBranch, "--single-branch", baseCloneURL, c.dir); err != nil {
+ if err := w.wrappedGit(logger, c, "clone", "--branch", c.pr.BaseBranch, "--single-branch", baseCloneURL, c.dir); err != nil {
return err
}
} else {
- if err := w.wrappedGit(c, "clone", "--depth", fmt.Sprint(w.CheckoutDepth), "--branch", c.pr.BaseBranch, "--single-branch", baseCloneURL, c.dir); err != nil {
+ if err := w.wrappedGit(logger, c, "clone", "--depth", fmt.Sprint(w.CheckoutDepth), "--branch", c.pr.BaseBranch, "--single-branch", baseCloneURL, c.dir); err != nil {
return err
}
}
- if err := w.wrappedGit(c, "remote", "add", "head", headCloneURL); err != nil {
+ if err := w.wrappedGit(logger, c, "remote", "add", "head", headCloneURL); err != nil {
return err
}
if w.GpgNoSigningEnabled {
- if err := w.wrappedGit(c, "config", "--local", "commit.gpgsign", "false"); err != nil {
+ if err := w.wrappedGit(logger, c, "config", "--local", "commit.gpgsign", "false"); err != nil {
return err
}
}
- return w.mergeToBaseBranch(c)
+ return w.mergeToBaseBranch(logger, c)
}
// There is a new upstream update that we need, and we want to update to it
// without deleting any existing plans
-func (w *FileWorkspace) mergeAgain(c wrappedGitContext) error {
+func (w *FileWorkspace) mergeAgain(logger logging.SimpleLogging, c wrappedGitContext) error {
value, _ := cloneLocks.LoadOrStore(c.dir, new(sync.Mutex))
mutex := value.(*sync.Mutex)
@@ -294,11 +290,11 @@ func (w *FileWorkspace) mergeAgain(c wrappedGitContext) error {
}
// Reset branch as if it was cloned again
- if err := w.wrappedGit(c, "reset", "--hard", fmt.Sprintf("refs/remotes/origin/%s", c.pr.BaseBranch)); err != nil {
+ if err := w.wrappedGit(logger, c, "reset", "--hard", fmt.Sprintf("refs/remotes/origin/%s", c.pr.BaseBranch)); err != nil {
return err
}
- return w.mergeToBaseBranch(c)
+ return w.mergeToBaseBranch(logger, c)
}
// wrappedGitContext is the configuration for wrappedGit that is typically unchanged
@@ -311,7 +307,7 @@ type wrappedGitContext struct {
// wrappedGit runs git with additional environment settings required for git merge,
// and with sanitized error logging to avoid leaking git credentials
-func (w *FileWorkspace) wrappedGit(c wrappedGitContext, args ...string) error {
+func (w *FileWorkspace) wrappedGit(logger logging.SimpleLogging, c wrappedGitContext, args ...string) error {
cmd := exec.Command("git", args...) // nolint: gosec
cmd.Dir = c.dir
// The git merge command requires these env vars are set.
@@ -327,12 +323,12 @@ func (w *FileWorkspace) wrappedGit(c wrappedGitContext, args ...string) error {
sanitizedErrMsg := w.sanitizeGitCredentials(err.Error(), c.pr.BaseRepo, c.head)
return fmt.Errorf("running %s: %s: %s", cmdStr, sanitizedOutput, sanitizedErrMsg)
}
- w.Logger.Debug("ran: %s. Output: %s", cmdStr, strings.TrimSuffix(sanitizedOutput, "\n"))
+ logger.Debug("ran: %s. Output: %s", cmdStr, strings.TrimSuffix(sanitizedOutput, "\n"))
return nil
}
// Merge the PR into the base branch.
-func (w *FileWorkspace) mergeToBaseBranch(c wrappedGitContext) error {
+func (w *FileWorkspace) mergeToBaseBranch(logger logging.SimpleLogging, c wrappedGitContext) error {
fetchRef := fmt.Sprintf("+refs/heads/%s:", c.pr.HeadBranch)
fetchRemote := "head"
if w.GithubAppEnabled {
@@ -342,19 +338,19 @@ func (w *FileWorkspace) mergeToBaseBranch(c wrappedGitContext) error {
// if no checkout depth, omit depth arg
if w.CheckoutDepth == 0 {
- if err := w.wrappedGit(c, "fetch", fetchRemote, fetchRef); err != nil {
+ if err := w.wrappedGit(logger, c, "fetch", fetchRemote, fetchRef); err != nil {
return err
}
} else {
- if err := w.wrappedGit(c, "fetch", "--depth", fmt.Sprint(w.CheckoutDepth), fetchRemote, fetchRef); err != nil {
+ if err := w.wrappedGit(logger, c, "fetch", "--depth", fmt.Sprint(w.CheckoutDepth), fetchRemote, fetchRef); err != nil {
return err
}
}
- if err := w.wrappedGit(c, "merge-base", c.pr.BaseBranch, "FETCH_HEAD"); err != nil {
+ if err := w.wrappedGit(logger, c, "merge-base", c.pr.BaseBranch, "FETCH_HEAD"); err != nil {
// git merge-base returning error means that we did not receive enough commits in shallow clone.
// Fall back to retrieving full repo history.
- if err := w.wrappedGit(c, "fetch", "--unshallow"); err != nil {
+ if err := w.wrappedGit(logger, c, "fetch", "--unshallow"); err != nil {
return err
}
}
@@ -365,7 +361,7 @@ func (w *FileWorkspace) mergeToBaseBranch(c wrappedGitContext) error {
// git rev-parse HEAD^2 to get the head commit because it will
// always succeed whereas without --no-ff, if the merge was fast
// forwarded then git rev-parse HEAD^2 would fail.
- return w.wrappedGit(c, "merge", "-q", "--no-ff", "-m", "atlantis-merge", "FETCH_HEAD")
+ return w.wrappedGit(logger, c, "merge", "-q", "--no-ff", "-m", "atlantis-merge", "FETCH_HEAD")
}
// GetWorkingDir returns the path to the workspace for this repo and pull.
@@ -388,16 +384,16 @@ func (w *FileWorkspace) GetPullDir(r models.Repo, p models.PullRequest) (string,
}
// Delete deletes the workspace for this repo and pull.
-func (w *FileWorkspace) Delete(r models.Repo, p models.PullRequest) error {
+func (w *FileWorkspace) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error {
repoPullDir := w.repoPullDir(r, p)
- w.Logger.Info("Deleting repo pull directory: " + repoPullDir)
+ logger.Info("Deleting repo pull directory: " + repoPullDir)
return os.RemoveAll(repoPullDir)
}
// DeleteForWorkspace deletes the working dir for this workspace.
-func (w *FileWorkspace) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error {
+func (w *FileWorkspace) DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) error {
workspaceDir := w.cloneDir(r, p, workspace)
- w.Logger.Info("Deleting workspace directory: " + workspaceDir)
+ logger.Info("Deleting workspace directory: " + workspaceDir)
return os.RemoveAll(workspaceDir)
}
@@ -421,20 +417,20 @@ func (w *FileWorkspace) SetCheckForUpstreamChanges() {
w.CheckForUpstreamChanges = true
}
-func (w *FileWorkspace) DeletePlan(r models.Repo, p models.PullRequest, workspace string, projectPath string, projectName string) error {
+func (w *FileWorkspace) DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, projectPath string, projectName string) error {
planPath := filepath.Join(w.cloneDir(r, p, workspace), projectPath, runtime.GetPlanFilename(workspace, projectName))
- w.Logger.Info("Deleting plan: " + planPath)
+ logger.Info("Deleting plan: " + planPath)
return os.Remove(planPath)
}
// GetGitUntrackedFiles returns a list of Git untracked files in the working dir.
-func (w *FileWorkspace) GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) ([]string, error) {
+func (w *FileWorkspace) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) ([]string, error) {
workingDir, err := w.GetWorkingDir(r, p, workspace)
if err != nil {
return nil, err
}
- w.Logger.Debug("Checking for Git untracked files in directory: '%s'", workingDir)
+ logger.Debug("Checking for Git untracked files in directory: '%s'", workingDir)
cmd := exec.Command("git", "ls-files", "--others", "--exclude-standard")
cmd.Dir = workingDir
@@ -444,6 +440,6 @@ func (w *FileWorkspace) GetGitUntrackedFiles(r models.Repo, p models.PullRequest
}
untrackedFiles := strings.Split(string(output), "\n")[:]
- w.Logger.Debug("Untracked files: '%s'", strings.Join(untrackedFiles, ","))
+ logger.Debug("Untracked files: '%s'", strings.Join(untrackedFiles, ","))
return untrackedFiles, nil
}
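
Review note: the change above moves the logger off the FileWorkspace struct and into each method signature, so a single long-lived workspace can be driven by request-scoped loggers (for example, one tagged per pull request). A small sketch of the pattern, with stand-in types rather than the real logging.SimpleLogging:

```go
package main

import "log"

// Logger stands in for logging.SimpleLogging; only Debug is sketched.
type Logger interface {
	Debug(format string, args ...interface{})
}

type prefixLogger struct{ prefix string }

func (l prefixLogger) Debug(format string, args ...interface{}) {
	log.Printf("DEBUG "+l.prefix+" "+format, args...)
}

// workspace is shared and long-lived, so it no longer owns a logger;
// each call brings the logger scoped to the operation that triggered it.
type workspace struct{}

func (w *workspace) Clone(logger Logger, dir string) {
	logger.Debug("cloning into '%s'", dir)
}

func main() {
	w := &workspace{}
	// Two pulls handled by the same workspace get separately tagged logs.
	w.Clone(prefixLogger{prefix: "[pull=101]"}, "/tmp/a")
	w.Clone(prefixLogger{prefix: "[pull=102]"}, "/tmp/b")
}
```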
diff --git a/server/events/working_dir_test.go b/server/events/working_dir_test.go
index f277c12e6b..e25c420100 100644
--- a/server/events/working_dir_test.go
+++ b/server/events/working_dir_test.go
@@ -43,10 +43,9 @@ func TestClone_NoneExisting(t *testing.T) {
CheckoutMerge: false,
TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir),
GpgNoSigningEnabled: true,
- Logger: logger,
}
- cloneDir, _, err := wd.Clone(models.Repo{}, models.PullRequest{
+ cloneDir, _, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
BaseRepo: models.Repo{},
HeadBranch: "branch",
}, "default")
@@ -96,10 +95,9 @@ func TestClone_CheckoutMergeNoneExisting(t *testing.T) {
TestingOverrideHeadCloneURL: overrideURL,
TestingOverrideBaseCloneURL: overrideURL,
GpgNoSigningEnabled: true,
- Logger: logger,
}
- cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+ cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
BaseRepo: models.Repo{},
HeadBranch: "branch",
BaseBranch: "main",
@@ -148,10 +146,9 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) {
TestingOverrideHeadCloneURL: overrideURL,
TestingOverrideBaseCloneURL: overrideURL,
GpgNoSigningEnabled: true,
- Logger: logger,
}
- _, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+ _, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
BaseRepo: models.Repo{},
HeadBranch: "branch",
BaseBranch: "main",
@@ -163,7 +160,7 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) {
runCmd(t, dataDir, "touch", "repos/0/default/proof")
// Now run the clone again.
- cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+ cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
BaseRepo: models.Repo{},
HeadBranch: "branch",
BaseBranch: "main",
@@ -201,10 +198,9 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) {
TestingOverrideHeadCloneURL: overrideURL,
TestingOverrideBaseCloneURL: overrideURL,
GpgNoSigningEnabled: true,
- Logger: logger,
}
- _, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+ _, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
BaseRepo: models.Repo{},
HeadBranch: "branch",
BaseBranch: "main",
@@ -216,7 +212,7 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) {
runCmd(t, dataDir, "touch", "repos/0/default/proof")
// Now run the clone again.
- cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+ cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
BaseRepo: models.Repo{},
HeadBranch: "branch",
BaseBranch: "main",
@@ -259,10 +255,9 @@ func TestClone_CheckoutMergeConflict(t *testing.T) {
TestingOverrideHeadCloneURL: overrideURL,
TestingOverrideBaseCloneURL: overrideURL,
GpgNoSigningEnabled: true,
- Logger: logger,
}
- _, _, err := wd.Clone(models.Repo{}, models.PullRequest{
+ _, _, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
BaseRepo: models.Repo{},
HeadBranch: "branch",
BaseBranch: "main",
@@ -319,10 +314,9 @@ func TestClone_CheckoutMergeShallow(t *testing.T) {
TestingOverrideHeadCloneURL: overrideURL,
TestingOverrideBaseCloneURL: overrideURL,
GpgNoSigningEnabled: true,
- Logger: logger,
}
- cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+ cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
BaseRepo: models.Repo{},
HeadBranch: "branch",
BaseBranch: "main",
@@ -350,10 +344,9 @@ func TestClone_CheckoutMergeShallow(t *testing.T) {
TestingOverrideHeadCloneURL: overrideURL,
TestingOverrideBaseCloneURL: overrideURL,
GpgNoSigningEnabled: true,
- Logger: logger,
}
- cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+ cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
BaseRepo: models.Repo{},
HeadBranch: "branch",
BaseBranch: "main",
@@ -387,9 +380,8 @@ func TestClone_NoReclone(t *testing.T) {
CheckoutMerge: false,
TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir),
GpgNoSigningEnabled: true,
- Logger: logger,
}
- cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+ cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
BaseRepo: models.Repo{},
HeadBranch: "branch",
}, "default")
@@ -432,9 +424,8 @@ func TestClone_RecloneWrongCommit(t *testing.T) {
CheckoutMerge: false,
TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir),
GpgNoSigningEnabled: true,
- Logger: logger,
}
- cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+ cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
BaseRepo: models.Repo{},
HeadBranch: "branch",
HeadCommit: expCommit,
@@ -506,7 +497,6 @@ func TestClone_MasterHasDiverged(t *testing.T) {
CheckoutMerge: false,
CheckoutDepth: 50,
GpgNoSigningEnabled: true,
- Logger: logger,
}
// Pretend terraform has created a plan file, we'll check for it later
@@ -518,7 +508,7 @@ func TestClone_MasterHasDiverged(t *testing.T) {
// Run the clone without the checkout merge strategy. It should return
// false for mergedAgain
- _, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+ _, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
BaseRepo: models.Repo{},
HeadBranch: "second-pr",
BaseBranch: "main",
@@ -532,7 +522,7 @@ func TestClone_MasterHasDiverged(t *testing.T) {
// Run the clone twice with the merge strategy, the first run should
// return true for mergedAgain, subsequent runs should
// return false since the first call is supposed to merge.
- _, mergedAgain, err = wd.Clone(models.Repo{CloneURL: repoDir}, models.PullRequest{
+ _, mergedAgain, err = wd.Clone(logger, models.Repo{CloneURL: repoDir}, models.PullRequest{
BaseRepo: models.Repo{CloneURL: repoDir},
HeadBranch: "second-pr",
BaseBranch: "main",
@@ -542,7 +532,7 @@ func TestClone_MasterHasDiverged(t *testing.T) {
Assert(t, mergedAgain == true, "First clone with CheckoutMerge=true with diverged base should have merged")
wd.SetCheckForUpstreamChanges()
- _, mergedAgain, err = wd.Clone(models.Repo{CloneURL: repoDir}, models.PullRequest{
+ _, mergedAgain, err = wd.Clone(logger, models.Repo{CloneURL: repoDir}, models.PullRequest{
BaseRepo: models.Repo{CloneURL: repoDir},
HeadBranch: "second-pr",
BaseBranch: "main",
@@ -610,15 +600,14 @@ func TestHasDiverged_MasterHasDiverged(t *testing.T) {
CheckoutMerge: true,
CheckoutDepth: 50,
GpgNoSigningEnabled: true,
- Logger: logger,
}
- hasDiverged := wd.HasDiverged(repoDir + "/repos/0/default")
+ hasDiverged := wd.HasDiverged(logger, repoDir+"/repos/0/default")
Equals(t, hasDiverged, true)
// Run it again but without the checkout merge strategy. It should return
// false.
wd.CheckoutMerge = false
- hasDiverged = wd.HasDiverged(repoDir + "/repos/0/default")
+ hasDiverged = wd.HasDiverged(logger, repoDir+"/repos/0/default")
Equals(t, hasDiverged, false)
}
diff --git a/server/logging/simple_logger.go b/server/logging/simple_logger.go
index e7d18e5654..5003a1fda0 100644
--- a/server/logging/simple_logger.go
+++ b/server/logging/simple_logger.go
@@ -19,7 +19,6 @@ package logging
import (
"bytes"
"fmt"
- "testing"
"github.com/pkg/errors"
"go.uber.org/zap"
@@ -184,7 +183,7 @@ func (l *StructuredLogger) saveToHistory(lvl LogLevel, format string, a ...inter
// NewNoopLogger creates a logger instance that discards all logs and never
// writes them. Used for testing.
-func NewNoopLogger(t *testing.T) SimpleLogging {
+func NewNoopLogger(t zaptest.TestingT) SimpleLogging {
level := zap.DebugLevel
return &StructuredLogger{
z: zaptest.NewLogger(t, zaptest.Level(level)).Sugar(),
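
Review note: zaptest.TestingT is an interface, so widening NewNoopLogger's parameter from *testing.T lets benchmarks (and anything else satisfying the interface) construct the noop logger too. A sketch of the effect, assuming go.uber.org/zap/zaptest as imported above; newTestLogger is a hypothetical stand-in, not the Atlantis helper:

```go
package logging_test

import (
	"testing"

	"go.uber.org/zap"
	"go.uber.org/zap/zaptest"
)

// newTestLogger mirrors the shape of NewNoopLogger: because the
// parameter is the zaptest.TestingT interface rather than *testing.T,
// both tests and benchmarks can construct a logger from it.
func newTestLogger(t zaptest.TestingT) *zap.SugaredLogger {
	return zaptest.NewLogger(t, zaptest.Level(zap.DebugLevel)).Sugar()
}

func TestLogger(t *testing.T) {
	newTestLogger(t).Debug("called from a test")
}

func BenchmarkLogger(b *testing.B) {
	logger := newTestLogger(b)
	for i := 0; i < b.N; i++ {
		logger.Debug("called from a benchmark")
	}
}
```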
diff --git a/server/server.go b/server/server.go
index eeab9d732e..4bae892b0a 100644
--- a/server/server.go
+++ b/server/server.go
@@ -50,7 +50,7 @@ import (
"github.com/pkg/errors"
"github.com/runatlantis/atlantis/server/controllers"
events_controllers "github.com/runatlantis/atlantis/server/controllers/events"
- "github.com/runatlantis/atlantis/server/controllers/templates"
+ "github.com/runatlantis/atlantis/server/controllers/web_templates"
"github.com/runatlantis/atlantis/server/controllers/websocket"
"github.com/runatlantis/atlantis/server/core/locking"
"github.com/runatlantis/atlantis/server/core/runtime"
@@ -62,6 +62,7 @@ import (
"github.com/runatlantis/atlantis/server/events/vcs"
"github.com/runatlantis/atlantis/server/events/vcs/bitbucketcloud"
"github.com/runatlantis/atlantis/server/events/vcs/bitbucketserver"
+ "github.com/runatlantis/atlantis/server/events/vcs/gitea"
"github.com/runatlantis/atlantis/server/events/webhooks"
"github.com/runatlantis/atlantis/server/logging"
)
@@ -106,10 +107,10 @@ type Server struct {
StatusController *controllers.StatusController
JobsController *controllers.JobsController
APIController *controllers.APIController
- IndexTemplate templates.TemplateWriter
- LockDetailTemplate templates.TemplateWriter
- ProjectJobsTemplate templates.TemplateWriter
- ProjectJobsErrorTemplate templates.TemplateWriter
+ IndexTemplate web_templates.TemplateWriter
+ LockDetailTemplate web_templates.TemplateWriter
+ ProjectJobsTemplate web_templates.TemplateWriter
+ ProjectJobsErrorTemplate web_templates.TemplateWriter
SSLCertFile string
SSLKeyFile string
CertLastRefreshTime time.Time
@@ -176,6 +177,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
var bitbucketCloudClient *bitbucketcloud.Client
var bitbucketServerClient *bitbucketserver.Client
var azuredevopsClient *vcs.AzureDevopsClient
+ var giteaClient *gitea.GiteaClient
policyChecksEnabled := false
if userConfig.EnablePolicyChecksFlag {
@@ -300,6 +302,17 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
return nil, err
}
}
+ if userConfig.GiteaToken != "" {
+ supportedVCSHosts = append(supportedVCSHosts, models.Gitea)
+
+ giteaClient, err = gitea.NewClient(userConfig.GiteaBaseURL, userConfig.GiteaUser, userConfig.GiteaToken, userConfig.GiteaPageSize, logger)
+ if err != nil {
+ return nil, errors.Wrap(err, "setting up Gitea client")
+ }
+ logger.Info("gitea client configured successfully")
+ }
+
+ logger.Info("Supported VCS Hosts", "hosts", supportedVCSHosts)
home, err := homedir.Dir()
if err != nil {
@@ -333,6 +348,11 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
return nil, err
}
}
+ if userConfig.GiteaUser != "" {
+ if err := vcs.WriteGitCreds(userConfig.GiteaUser, userConfig.GiteaToken, userConfig.GiteaBaseURL, home, logger, false); err != nil {
+ return nil, err
+ }
+ }
}
// default the project files used to generate the module index to the autoplan-file-list if autoplan-modules is true
@@ -356,7 +376,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
if err != nil {
return nil, errors.Wrap(err, "initializing webhooks")
}
- vcsClient := vcs.NewClientProxy(githubClient, gitlabClient, bitbucketCloudClient, bitbucketServerClient, azuredevopsClient)
+ vcsClient := vcs.NewClientProxy(githubClient, gitlabClient, bitbucketCloudClient, bitbucketServerClient, azuredevopsClient, giteaClient)
commitStatusUpdater := &events.DefaultCommitStatusUpdater{Client: vcsClient, StatusName: userConfig.VCSStatusName}
binDir, err := mkSubDir(userConfig.DataDir, BinDirName)
@@ -469,7 +489,6 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
CheckoutMerge: userConfig.CheckoutStrategy == "merge",
CheckoutDepth: userConfig.CheckoutDepth,
GithubAppEnabled: githubAppEnabled,
- Logger: logger,
}
scheduledExecutorService := scheduled.NewExecutorService(
@@ -503,7 +522,6 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
}
deleteLockCommand := &events.DefaultDeleteLockCommand{
Locker: lockingClient,
- Logger: logger,
WorkingDir: workingDir,
WorkingDirLocker: workingDirLocker,
Backend: backend,
@@ -515,7 +533,6 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
&events.PullClosedExecutor{
Locker: lockingClient,
WorkingDir: workingDir,
- Logger: logger,
Backend: backend,
PullClosedTemplate: &events.PullClosedEventTemplate{},
LogStreamResourceCleaner: projectCmdOutputHandler,
@@ -528,6 +545,8 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
GithubToken: userConfig.GithubToken,
GitlabUser: userConfig.GitlabUser,
GitlabToken: userConfig.GitlabToken,
+ GiteaUser: userConfig.GiteaUser,
+ GiteaToken: userConfig.GiteaToken,
AllowDraftPRs: userConfig.PlanDrafts,
BitbucketUser: userConfig.BitbucketUser,
BitbucketToken: userConfig.BitbucketToken,
@@ -538,6 +557,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
commentParser := events.NewCommentParser(
userConfig.GithubUser,
userConfig.GitlabUser,
+ userConfig.GiteaUser,
userConfig.BitbucketUser,
userConfig.AzureDevopsUser,
userConfig.ExecutableName,
@@ -601,7 +621,6 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
userConfig.IncludeGitUntrackedFiles,
userConfig.AutoDiscoverModeFlag,
statsScope,
- logger,
terraformClient,
)
@@ -798,6 +817,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
GithubPullGetter: githubClient,
GitlabMergeRequestGetter: gitlabClient,
AzureDevopsPullGetter: azuredevopsClient,
+ GiteaPullGetter: giteaClient,
CommentCommandRunnerByCmd: commentCommandRunnerByCmd,
EventParser: eventParser,
FailOnPreWorkflowHookError: userConfig.FailOnPreWorkflowHookError,
@@ -829,7 +849,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
ApplyLocker: applyLockingClient,
Logger: logger,
VCSClient: vcsClient,
- LockDetailTemplate: templates.LockTemplate,
+ LockDetailTemplate: web_templates.LockTemplate,
WorkingDir: workingDir,
WorkingDirLocker: workingDirLocker,
Backend: backend,
@@ -847,8 +867,8 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
AtlantisVersion: config.AtlantisVersion,
AtlantisURL: parsedURL,
Logger: logger,
- ProjectJobsTemplate: templates.ProjectJobsTemplate,
- ProjectJobsErrorTemplate: templates.ProjectJobsErrorTemplate,
+ ProjectJobsTemplate: web_templates.ProjectJobsTemplate,
+ ProjectJobsErrorTemplate: web_templates.ProjectJobsErrorTemplate,
Backend: backend,
WsMux: wsMux,
KeyGenerator: controllers.JobIDKeyGenerator{},
@@ -889,6 +909,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
AzureDevopsWebhookBasicUser: []byte(userConfig.AzureDevopsWebhookUser),
AzureDevopsWebhookBasicPassword: []byte(userConfig.AzureDevopsWebhookPassword),
AzureDevopsRequestValidator: &events_controllers.DefaultAzureDevopsRequestValidator{},
+ GiteaWebhookSecret: []byte(userConfig.GiteaWebhookSecret),
}
githubAppController := &controllers.GithubAppController{
AtlantisURL: parsedURL,
@@ -918,10 +939,10 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
JobsController: jobsController,
StatusController: statusController,
APIController: apiController,
- IndexTemplate: templates.IndexTemplate,
- LockDetailTemplate: templates.LockTemplate,
- ProjectJobsTemplate: templates.ProjectJobsTemplate,
- ProjectJobsErrorTemplate: templates.ProjectJobsErrorTemplate,
+ IndexTemplate: web_templates.IndexTemplate,
+ LockDetailTemplate: web_templates.LockTemplate,
+ ProjectJobsTemplate: web_templates.ProjectJobsTemplate,
+ ProjectJobsErrorTemplate: web_templates.ProjectJobsErrorTemplate,
SSLKeyFile: userConfig.SSLKeyFile,
SSLCertFile: userConfig.SSLCertFile,
DisableGlobalApplyLock: userConfig.DisableGlobalApplyLock,
@@ -1046,10 +1067,10 @@ func (s *Server) Index(w http.ResponseWriter, _ *http.Request) {
return
}
- var lockResults []templates.LockIndexData
+ var lockResults []web_templates.LockIndexData
for id, v := range locks {
lockURL, _ := s.Router.Get(LockViewRouteName).URL("id", url.QueryEscape(id))
- lockResults = append(lockResults, templates.LockIndexData{
+ lockResults = append(lockResults, web_templates.LockIndexData{
// NOTE: must use .String() instead of .Path because we need the
// query params as part of the lock URL.
LockPath: lockURL.String(),
@@ -1071,7 +1092,7 @@ func (s *Server) Index(w http.ResponseWriter, _ *http.Request) {
return
}
- applyLockData := templates.ApplyLockData{
+ applyLockData := web_templates.ApplyLockData{
Time: applyCmdLock.Time,
Locked: applyCmdLock.Locked,
GlobalApplyLockEnabled: applyCmdLock.GlobalApplyLockEnabled,
@@ -1080,7 +1101,7 @@ func (s *Server) Index(w http.ResponseWriter, _ *http.Request) {
//Sort by date - newest to oldest.
sort.SliceStable(lockResults, func(i, j int) bool { return lockResults[i].Time.After(lockResults[j].Time) })
- err = s.IndexTemplate.Execute(w, templates.IndexData{
+ err = s.IndexTemplate.Execute(w, web_templates.IndexData{
Locks: lockResults,
PullToJobMapping: preparePullToJobMappings(s),
ApplyLock: applyLockData,
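
Review note, flagged as a possible gotcha rather than a confirmed bug: giteaClient is declared as the concrete pointer type *gitea.GiteaClient and handed to NewClientProxy, whose nil check compares an interface value. A typed-nil pointer stored in an interface is not == nil in Go, so the NotConfiguredVCSClient fallback may not fire when Gitea is unconfigured; the other clients are wired the same way, so if this applies at all it predates the PR. A self-contained demonstration of the semantics:

```go
package main

import "fmt"

type Client interface{ Do() }

type giteaClient struct{}

func (g *giteaClient) Do() {}

func main() {
	var typed *giteaClient // declared but never constructed, as in NewServer
	var iface Client = typed

	fmt.Println(typed == nil) // true
	fmt.Println(iface == nil) // false: the interface now carries a type descriptor
}
```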
diff --git a/server/server_test.go b/server/server_test.go
index e9151443e2..e96c6aa6b4 100644
--- a/server/server_test.go
+++ b/server/server_test.go
@@ -27,8 +27,8 @@ import (
"github.com/gorilla/mux"
. "github.com/petergtz/pegomock/v4"
"github.com/runatlantis/atlantis/server"
- "github.com/runatlantis/atlantis/server/controllers/templates"
- tMocks "github.com/runatlantis/atlantis/server/controllers/templates/mocks"
+ "github.com/runatlantis/atlantis/server/controllers/web_templates"
+ tMocks "github.com/runatlantis/atlantis/server/controllers/web_templates/mocks"
"github.com/runatlantis/atlantis/server/core/locking/mocks"
"github.com/runatlantis/atlantis/server/events/models"
"github.com/runatlantis/atlantis/server/jobs"
@@ -113,13 +113,13 @@ func TestIndex_Success(t *testing.T) {
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
w := httptest.NewRecorder()
s.Index(w, req)
- it.VerifyWasCalledOnce().Execute(w, templates.IndexData{
- ApplyLock: templates.ApplyLockData{
+ it.VerifyWasCalledOnce().Execute(w, web_templates.IndexData{
+ ApplyLock: web_templates.ApplyLockData{
Locked: false,
Time: time.Time{},
TimeFormatted: "01-01-0001 00:00:00",
},
- Locks: []templates.LockIndexData{
+ Locks: []web_templates.LockIndexData{
{
LockPath: "/lock?id=lkysow%252Fatlantis-example%252F.%252Fdefault",
RepoFullName: "lkysow/atlantis-example",
diff --git a/server/user_config.go b/server/user_config.go
index 977b008610..8109a30277 100644
--- a/server/user_config.go
+++ b/server/user_config.go
@@ -58,6 +58,11 @@ type UserConfig struct {
GithubAppKeyFile string `mapstructure:"gh-app-key-file"`
GithubAppSlug string `mapstructure:"gh-app-slug"`
GithubTeamAllowlist string `mapstructure:"gh-team-allowlist"`
+ GiteaBaseURL string `mapstructure:"gitea-base-url"`
+ GiteaToken string `mapstructure:"gitea-token"`
+ GiteaUser string `mapstructure:"gitea-user"`
+ GiteaWebhookSecret string `mapstructure:"gitea-webhook-secret"`
+ GiteaPageSize int `mapstructure:"gitea-page-size"`
GitlabHostname string `mapstructure:"gitlab-hostname"`
GitlabToken string `mapstructure:"gitlab-token"`
GitlabUser string `mapstructure:"gitlab-user"`
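
Review note: the mapstructure tags are what bind configuration values to these fields; by the usual Atlantis convention a tag like gitea-base-url corresponds to the --gitea-base-url flag and the ATLANTIS_GITEA_BASE_URL environment variable. A sketch of the decode step, assuming github.com/mitchellh/mapstructure (the library underneath viper's unmarshalling); giteaConfig is a trimmed-down stand-in for UserConfig:

```go
package main

import (
	"fmt"

	"github.com/mitchellh/mapstructure"
)

// giteaConfig keeps only the fields added in this diff.
type giteaConfig struct {
	BaseURL  string `mapstructure:"gitea-base-url"`
	Token    string `mapstructure:"gitea-token"`
	PageSize int    `mapstructure:"gitea-page-size"`
}

func main() {
	// Values as they might arrive from the flag/env layer.
	raw := map[string]interface{}{
		"gitea-base-url":  "https://gitea.example.com",
		"gitea-token":     "s3cr3t",
		"gitea-page-size": 30,
	}

	var cfg giteaConfig
	if err := mapstructure.Decode(raw, &cfg); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", cfg) // {BaseURL:https://gitea.example.com Token:s3cr3t PageSize:30}
}
```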
diff --git a/testdrive/utils.go b/testdrive/utils.go
index cbf706d587..ba6d8288b4 100644
--- a/testdrive/utils.go
+++ b/testdrive/utils.go
@@ -35,7 +35,7 @@ import (
)
const hashicorpReleasesURL = "https://releases.hashicorp.com"
-const terraformVersion = "1.7.4" // renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp
+const terraformVersion = "1.7.5" // renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp
const ngrokDownloadURL = "https://bin.equinox.io/c/4VmDzA7iaHb"
const ngrokAPIURL = "localhost:41414" // We hope this isn't used.
const atlantisPort = 4141
diff --git a/testing/Dockerfile b/testing/Dockerfile
index aafcb3cbe8..2b8ee0e19c 100644
--- a/testing/Dockerfile
+++ b/testing/Dockerfile
@@ -6,7 +6,7 @@ RUN apt-get update && apt-get --no-install-recommends -y install unzip \
# Install Terraform
# renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp
-ENV TERRAFORM_VERSION=1.7.4
+ENV TERRAFORM_VERSION=1.7.5
RUN case $(uname -m) in x86_64|amd64) ARCH="amd64" ;; aarch64|arm64|armv7l) ARCH="arm64" ;; esac && \
wget -nv -O terraform.zip https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_${ARCH}.zip && \
mkdir -p /usr/local/bin/tf/versions/${TERRAFORM_VERSION} && \
@@ -16,7 +16,7 @@ RUN case $(uname -m) in x86_64|amd64) ARCH="amd64" ;; aarch64|arm64|armv7l) ARCH
# Install conftest
# renovate: datasource=github-releases depName=open-policy-agent/conftest
-ENV CONFTEST_VERSION=0.50.0
+ENV CONFTEST_VERSION=0.51.0
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN case $(uname -m) in x86_64|amd64) ARCH="x86_64" ;; aarch64|arm64|armv7l) ARCH="arm64" ;; esac && \
curl -LOs https://github.com/open-policy-agent/conftest/releases/download/v${CONFTEST_VERSION}/conftest_${CONFTEST_VERSION}_Linux_${ARCH}.tar.gz && \