From cbfe71e7d917eeefeea4e43ad3364a0cf676b85d Mon Sep 17 00:00:00 2001 From: alanjino Date: Wed, 3 Apr 2024 18:28:05 +0530 Subject: [PATCH] pre-commit implementation Signed-off-by: alanjino --- .github/workflows/agent-container-pr.yml | 10 +-- .github/workflows/agent-container.yml | 8 +- .github/workflows/agent-git-pr.yml | 8 +- .github/workflows/agent-git.yml | 8 +- .github/workflows/agent-kubviz-image.yml | 8 +- .github/workflows/agent-kubviz-pr.yml | 8 +- .github/workflows/apisec-scan.yml | 4 +- .github/workflows/client-image.yml | 8 +- .github/workflows/client-pr.yml | 10 +-- .github/workflows/codeql.yml | 6 +- .github/workflows/devskim.yml | 2 +- .github/workflows/makefile.yml | 8 +- .github/workflows/neuralegion.yml | 4 +- .github/workflows/scorecards.yml | 8 +- .github/workflows/sonarcloud.yml | 16 ++-- .github/workflows/soos-dast-scan.yml | 2 +- .github/workflows/synopsys-io.yml | 16 ++-- .github/workflows/sysdig-scan.yml | 4 +- .github/workflows/tfsec.yml | 6 +- .pre-commit-config.yaml | 87 +++++++++++++++++++ README.md | 24 ++--- agent/container/main.go | 2 +- agent/container/openapi.yaml | 2 +- .../container/pkg/application/application.go | 2 +- agent/container/pkg/application/handlers.go | 2 +- agent/container/pkg/handler/api_handler.go | 2 +- .../container/pkg/handler/azure_container.go | 2 +- .../pkg/handler/docker_event_dockerhub.go | 2 +- .../container/pkg/handler/jfrog_container.go | 2 +- agent/container/pkg/handler/quay_handler.go | 2 +- agent/git/pkg/application/application.go | 2 +- agent/git/pkg/application/handlers.go | 6 +- agent/server/server.go | 2 +- charts/client/Chart.yaml | 2 +- .../configmap-vertamedia-datasource.yaml | 4 +- charts/client/values.yaml | 4 +- client/main.go | 2 +- client/pkg/clients/bridge_client.go | 2 +- client/pkg/clients/container_client.go | 2 +- docs/CONFIGURATION.md | 6 +- docs/CONTRIBUTING.md | 2 +- sql/0000010_trivy_misconfig.up.sql | 4 +- sql/0000011_trivyimage.up.sql | 4 +- sql/0000012_dockerhubbuild.up.sql | 4 +- sql/0000013_azurecontainerpush.up.sql | 4 +- sql/0000014_quaycontainerpush.up.sql | 4 +- sql/0000015_trivysbom.up.sql | 4 +- sql/0000016_azure_devops.up.sql | 4 +- sql/0000017_github.up.sql | 4 +- sql/0000018_gitlab.up.sql | 4 +- sql/0000019_bitbucket.up.sql | 4 +- sql/000001_events.up.sql | 4 +- sql/0000020_gitea.up.sql | 4 +- sql/000002_rakkess.up.sql | 4 +- sql/000003_DeprecatedAPIs.up.sql | 4 +- sql/000004_DeletedAPIs.up.sql | 4 +- sql/000005_jfrogcontainerpush.up.sql | 4 +- sql/000006_getall_resources.up.sql | 4 +- sql/000007_outdated_images.up.sql | 4 +- sql/000008_kubescore.up.sql | 4 +- sql/000009_trivy_vul.up.sql | 4 +- 61 files changed, 237 insertions(+), 150 deletions(-) create mode 100644 .pre-commit-config.yaml diff --git a/.github/workflows/agent-container-pr.yml b/.github/workflows/agent-container-pr.yml index 5c61e536..035fe9bd 100644 --- a/.github/workflows/agent-container-pr.yml +++ b/.github/workflows/agent-container-pr.yml @@ -17,11 +17,11 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 0 - + - name: Set up QEMU uses: docker/setup-qemu-action@v2 - + - uses: docker/setup-buildx-action@v1 name: Set up Docker Buildx @@ -32,8 +32,8 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - + + - name: Build and push on PR uses: docker/build-push-action@v4 @@ -45,4 +45,4 @@ jobs: tags: ${{ env.REGISTRY }}/${{ github.repository }}/container-agent:pr-${{ github.event.pull_request.number }} build-args: | "GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}" - + diff 
--git a/.github/workflows/agent-container.yml b/.github/workflows/agent-container.yml index cf023c6f..d560d873 100644 --- a/.github/workflows/agent-container.yml +++ b/.github/workflows/agent-container.yml @@ -25,7 +25,7 @@ jobs: steps: - name: Checkout GitHub Action uses: actions/checkout@v3 - + - name: Set up Docker Buildx id: buildx uses: docker/setup-buildx-action@v2 @@ -56,7 +56,7 @@ jobs: file: ./dockerfiles/agent/container/Dockerfile tags: ${{ env.REGISTRY }}/${{ github.repository }}/container-agent:${{ github.run_id }} labels: ${{ steps.metadata.outputs.labels }} - + push: true - name: Install cosign @@ -67,12 +67,12 @@ jobs: cosign sign -y ${{ env.REGISTRY }}/${{ github.repository }}/container-agent:${{ github.run_id }} env: COSIGN_EXPERIMENTAL: 1 - + - name: Verify the pushed tags run: cosign verify ${{ env.REGISTRY }}/${{ github.repository }}/container-agent:${{ github.run_id }} --certificate-identity ${{ env.GH_URL }}/${{ github.repository }}/.github/workflows/agent-container.yml@refs/heads/main --certificate-oidc-issuer https://token.actions.githubusercontent.com env: COSIGN_EXPERIMENTAL: 1 - + - name: Run Trivy in GitHub SBOM mode and submit results to Dependency Graph uses: aquasecurity/trivy-action@master with: diff --git a/.github/workflows/agent-git-pr.yml b/.github/workflows/agent-git-pr.yml index cd2f294c..da62d8be 100644 --- a/.github/workflows/agent-git-pr.yml +++ b/.github/workflows/agent-git-pr.yml @@ -17,11 +17,11 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 0 - + - name: Set up QEMU uses: docker/setup-qemu-action@v2 - + - uses: docker/setup-buildx-action@v1 name: Set up Docker Buildx @@ -32,7 +32,7 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - + - name: Build and push on PR uses: docker/build-push-action@v4 @@ -44,4 +44,4 @@ jobs: tags: ${{ env.REGISTRY }}/${{ github.repository }}/git-agent:pr-${{ github.event.pull_request.number }} build-args: | "GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}" - + diff --git a/.github/workflows/agent-git.yml b/.github/workflows/agent-git.yml index 657b6684..83631cd7 100644 --- a/.github/workflows/agent-git.yml +++ b/.github/workflows/agent-git.yml @@ -25,7 +25,7 @@ jobs: steps: - name: Checkout GitHub Action uses: actions/checkout@v3 - + - name: Set up Docker Buildx id: buildx uses: docker/setup-buildx-action@v2 @@ -56,7 +56,7 @@ jobs: file: ./dockerfiles/agent/git/Dockerfile tags: ${{ env.REGISTRY }}/${{ github.repository }}/git-agent:${{ github.run_id }} labels: ${{ steps.metadata.outputs.labels }} - + push: true - name: Install cosign @@ -67,12 +67,12 @@ jobs: cosign sign -y ${{ env.REGISTRY }}/${{ github.repository }}/git-agent:${{ github.run_id }} env: COSIGN_EXPERIMENTAL: 1 - + - name: Verify the pushed tags run: cosign verify ${{ env.REGISTRY }}/${{ github.repository }}/git-agent:${{ github.run_id }} --certificate-identity ${{ env.GH_URL }}/${{ github.repository }}/.github/workflows/agent-git.yml@refs/heads/main --certificate-oidc-issuer https://token.actions.githubusercontent.com env: COSIGN_EXPERIMENTAL: 1 - + - name: Run Trivy in GitHub SBOM mode and submit results to Dependency Graph uses: aquasecurity/trivy-action@master with: diff --git a/.github/workflows/agent-kubviz-image.yml b/.github/workflows/agent-kubviz-image.yml index 30586d63..44c2bb67 100644 --- a/.github/workflows/agent-kubviz-image.yml +++ b/.github/workflows/agent-kubviz-image.yml @@ -25,7 +25,7 @@ jobs: steps: - name: Checkout GitHub Action uses: actions/checkout@v3 - + - name: Set up 
Docker Buildx id: buildx uses: docker/setup-buildx-action@v2 @@ -56,7 +56,7 @@ jobs: file: ./dockerfiles/agent/kubviz/Dockerfile tags: ${{ env.REGISTRY }}/${{ github.repository }}/kubviz-agent:${{ github.run_id }} labels: ${{ steps.metadata.outputs.labels }} - + push: true - name: Install cosign @@ -67,12 +67,12 @@ jobs: cosign sign -y ${{ env.REGISTRY }}/${{ github.repository }}/kubviz-agent:${{ github.run_id }} env: COSIGN_EXPERIMENTAL: 1 - + - name: Verify the pushed tags run: cosign verify ${{ env.REGISTRY }}/${{ github.repository }}/kubviz-agent:${{ github.run_id }} --certificate-identity ${{ env.GH_URL }}/${{ github.repository }}/.github/workflows/agent-kubviz-image.yml@refs/heads/main --certificate-oidc-issuer https://token.actions.githubusercontent.com env: COSIGN_EXPERIMENTAL: 1 - + - name: Run Trivy in GitHub SBOM mode and submit results to Dependency Graph uses: aquasecurity/trivy-action@master with: diff --git a/.github/workflows/agent-kubviz-pr.yml b/.github/workflows/agent-kubviz-pr.yml index cc0cf561..29eef866 100644 --- a/.github/workflows/agent-kubviz-pr.yml +++ b/.github/workflows/agent-kubviz-pr.yml @@ -17,11 +17,11 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 0 - + - name: Set up QEMU uses: docker/setup-qemu-action@v2 - + - uses: docker/setup-buildx-action@v1 name: Set up Docker Buildx @@ -32,7 +32,7 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - + - name: Build and push on PR uses: docker/build-push-action@v4 @@ -44,4 +44,4 @@ jobs: tags: ${{ env.REGISTRY }}/${{ github.repository }}/kubviz-agent:pr-${{ github.event.pull_request.number }} build-args: | "GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}" - + diff --git a/.github/workflows/apisec-scan.yml b/.github/workflows/apisec-scan.yml index b834f854..6151d036 100644 --- a/.github/workflows/apisec-scan.yml +++ b/.github/workflows/apisec-scan.yml @@ -3,8 +3,8 @@ # separate terms of service, privacy policy, and support # documentation. -# APIsec addresses the critical need to secure APIs before they reach production. -# APIsec provides the industry’s only automated and continuous API testing platform that uncovers security vulnerabilities and logic flaws in APIs. +# APIsec addresses the critical need to secure APIs before they reach production. +# APIsec provides the industry’s only automated and continuous API testing platform that uncovers security vulnerabilities and logic flaws in APIs. # Clients rely on APIsec to evaluate every update and release, ensuring that no APIs go to production with vulnerabilities. 
# How to Get Started with APIsec.ai diff --git a/.github/workflows/client-image.yml b/.github/workflows/client-image.yml index 77e20a1c..153881de 100644 --- a/.github/workflows/client-image.yml +++ b/.github/workflows/client-image.yml @@ -25,7 +25,7 @@ jobs: steps: - name: Checkout GitHub Action uses: actions/checkout@v3 - + - name: Set up Docker Buildx id: buildx uses: docker/setup-buildx-action@v2 @@ -56,7 +56,7 @@ jobs: file: ./dockerfiles/client/Dockerfile tags: ${{ env.REGISTRY }}/${{ github.repository }}/client:${{ github.run_id }} labels: ${{ steps.metadata.outputs.labels }} - + push: true - name: Install cosign @@ -67,12 +67,12 @@ jobs: cosign sign -y ${{ env.REGISTRY }}/${{ github.repository }}/client:${{ github.run_id }} env: COSIGN_EXPERIMENTAL: 1 - + - name: Verify the pushed tags run: cosign verify ${{ env.REGISTRY }}/${{ github.repository }}/client:${{ github.run_id }} --certificate-identity ${{ env.GH_URL }}/${{ github.repository }}/.github/workflows/client-image.yml@refs/heads/main --certificate-oidc-issuer https://token.actions.githubusercontent.com env: COSIGN_EXPERIMENTAL: 1 - + - name: Run Trivy in GitHub SBOM mode and submit results to Dependency Graph uses: aquasecurity/trivy-action@master with: diff --git a/.github/workflows/client-pr.yml b/.github/workflows/client-pr.yml index 870686a1..cf312014 100644 --- a/.github/workflows/client-pr.yml +++ b/.github/workflows/client-pr.yml @@ -17,11 +17,11 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 0 - + - name: Set up QEMU uses: docker/setup-qemu-action@v2 - + - uses: docker/setup-buildx-action@v1 name: Set up Docker Buildx @@ -32,8 +32,8 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - + + - name: Build and push on PR uses: docker/build-push-action@v4 @@ -45,4 +45,4 @@ jobs: tags: ${{ env.REGISTRY }}/${{ github.repository }}/client:pr-${{ github.event.pull_request.number }} build-args: | "GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}" - + diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 533200d3..df705353 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -48,11 +48,11 @@ jobs: # If you wish to specify custom queries, you can do so here or in a config file. # By default, queries listed here will override any specified in a config file. # Prefix the list here with "+" to use these queries and those in the config file. - + # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs # queries: security-extended,security-and-quality - + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild @@ -61,7 +61,7 @@ jobs: # ℹ️ Command-line programs to run using the OS shell. # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun - # If the Autobuild fails above, remove it and uncomment the following three lines. + # If the Autobuild fails above, remove it and uncomment the following three lines. # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 
# - run: | diff --git a/.github/workflows/devskim.yml b/.github/workflows/devskim.yml index 62d8407f..d1684f76 100644 --- a/.github/workflows/devskim.yml +++ b/.github/workflows/devskim.yml @@ -27,7 +27,7 @@ jobs: - name: Run DevSkim scanner uses: microsoft/DevSkim-Action@v1 - + - name: Upload DevSkim scan results to GitHub Security tab uses: github/codeql-action/upload-sarif@v2 with: diff --git a/.github/workflows/makefile.yml b/.github/workflows/makefile.yml index e4dadb23..8db022e5 100644 --- a/.github/workflows/makefile.yml +++ b/.github/workflows/makefile.yml @@ -13,15 +13,15 @@ jobs: steps: - uses: actions/checkout@v3 - + - name: configure run: ./configure - + - name: Install dependencies run: make - + - name: Run check run: make check - + - name: Run distcheck run: make distcheck diff --git a/.github/workflows/neuralegion.yml b/.github/workflows/neuralegion.yml index 9d130bcb..341929c3 100644 --- a/.github/workflows/neuralegion.yml +++ b/.github/workflows/neuralegion.yml @@ -50,7 +50,7 @@ # # `restart_scan` # -# **Required** when restarting an existing scan by its ID. You can get the scan ID in the Scans section on [nexploit.app](https://nexploit.app/login).
Please make sure to only use the necessary parameters. Otherwise, you will get a response with the parameter usage requirements. +# **Required** when restarting an existing scan by its ID. You can get the scan ID in the Scans section on [nexploit.app](https://nexploit.app/login).
Please make sure to only use the necessary parameters. Otherwise, you will get a response with the parameter usage requirements. # # _Example:_ `restart_scan: ai3LG8DmVn9Rn1YeqCNRGQ)` # @@ -95,7 +95,7 @@ # # `hosts_filter` # -# **Required** when the the discovery type is set to `archive`. Allows selecting specific hosts for a scan. +# **Required** when the the discovery type is set to `archive`. Allows selecting specific hosts for a scan. # # Outputs # diff --git a/.github/workflows/scorecards.yml b/.github/workflows/scorecards.yml index 51a3db4f..b4d7cff2 100644 --- a/.github/workflows/scorecards.yml +++ b/.github/workflows/scorecards.yml @@ -22,7 +22,7 @@ jobs: # Needs for private repositories. contents: read actions: read - + steps: - name: "Checkout code" uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # v3.0.0 @@ -41,8 +41,8 @@ jobs: # repo_token: ${{ secrets.SCORECARD_READ_TOKEN }} # Publish the results for public repositories to enable scorecard badges. For more details, see - # https://github.com/ossf/scorecard-action#publishing-results. - # For private repositories, `publish_results` will automatically be set to `false`, regardless + # https://github.com/ossf/scorecard-action#publishing-results. + # For private repositories, `publish_results` will automatically be set to `false`, regardless # of the value entered here. publish_results: true @@ -54,7 +54,7 @@ jobs: name: SARIF file path: results.sarif retention-days: 5 - + # Upload the results to GitHub's code scanning dashboard. - name: "Upload to code-scanning" uses: github/codeql-action/upload-sarif@5f532563584d71fdef14ee64d17bafb34f751ce5 # v1.0.26 diff --git a/.github/workflows/sonarcloud.yml b/.github/workflows/sonarcloud.yml index 81c46613..e9464daa 100644 --- a/.github/workflows/sonarcloud.yml +++ b/.github/workflows/sonarcloud.yml @@ -3,7 +3,7 @@ # separate terms of service, privacy policy, and support # documentation. -# This workflow helps you trigger a SonarCloud analysis of your code and populates +# This workflow helps you trigger a SonarCloud analysis of your code and populates # GitHub Code Scanning alerts with the vulnerabilities found. # Free for open source project. @@ -11,16 +11,16 @@ # 2. Import your project on SonarCloud # * Add your GitHub organization first, then add your repository as a new project. -# * Please note that many languages are eligible for automatic analysis, +# * Please note that many languages are eligible for automatic analysis, # which means that the analysis will start automatically without the need to set up GitHub Actions. # * This behavior can be changed in Administration > Analysis Method. -# +# # 3. Follow the SonarCloud in-product tutorial # * a. Copy/paste the Project Key and the Organization Key into the args parameter below # (You'll find this information in SonarCloud. Click on "Information" at the bottom left) # # * b. Generate a new token and add it to your Github repository's secrets using the name SONAR_TOKEN -# (On SonarCloud, click on your avatar on top-right > My account > Security +# (On SonarCloud, click on your avatar on top-right > My account > Security # or go directly to https://sonarcloud.io/account/security/) # Feel free to take a look at our documentation (https://docs.sonarcloud.io/getting-started/github/) @@ -41,9 +41,9 @@ permissions: jobs: Analysis: runs-on: ubuntu-latest - + steps: - - name: Analyze with SonarCloud + - name: Analyze with SonarCloud # You can pin the exact commit or the version. 
# uses: SonarSource/sonarcloud-github-action@de2e56b42aa84d0b1c5b622644ac17e505c9a049 @@ -53,7 +53,7 @@ jobs: SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} # Generate a token on Sonarcloud.io, add it to the secrets of this repo with the name SONAR_TOKEN (Settings > Secrets > Actions > add new repository secret) with: # Additional arguments for the sonarcloud scanner - args: + args: # Unique keys of your project and organization. You can find them in SonarCloud > Information (bottom-left menu) # mandatory -Dsonar.projectKey= @@ -65,4 +65,4 @@ jobs: # Comma-separated paths to directories containing test source files. #-Dsonar.tests= # optional. For more info about Code Coverage, please refer to https://docs.sonarcloud.io/enriching/test-coverage/overview/ # Adds more detail to both client and server-side analysis logs, activating DEBUG mode for the scanner, and adding client-side environment variables and system properties to the server-side log of analysis report processing. - #-Dsonar.verbose= # optional, default is false + #-Dsonar.verbose= # optional, default is false diff --git a/.github/workflows/soos-dast-scan.yml b/.github/workflows/soos-dast-scan.yml index 4fad66ea..5e4418ba 100644 --- a/.github/workflows/soos-dast-scan.yml +++ b/.github/workflows/soos-dast-scan.yml @@ -24,7 +24,7 @@ jobs: soos: permissions: security-events: write # for uploading code scanning alert info - actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status + actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status name: SOOS DAST Scan runs-on: ubuntu-latest steps: diff --git a/.github/workflows/synopsys-io.yml b/.github/workflows/synopsys-io.yml index e578be24..59891d57 100644 --- a/.github/workflows/synopsys-io.yml +++ b/.github/workflows/synopsys-io.yml @@ -22,11 +22,11 @@ jobs: actions: read contents: read security-events: write - + steps: - name: Checkout repository uses: actions/checkout@v3 - + - name: Synopsys Intelligent Security Scan id: prescription uses: synopsys-sig/intelligent-security-scan@48eedfcd42bc342a294dc495ac452797b2d9ff08 @@ -36,7 +36,7 @@ jobs: workflowServerUrl: ${{secrets.WORKFLOW_SERVER_URL}} additionalWorkflowArgs: --polaris.url=${{secrets.POLARIS_SERVER_URL}} --polaris.token=${{secrets.POLARIS_ACCESS_TOKEN}} stage: "IO" - + # Please note that the ID in previous step was set to prescription # in order for this logic to work also make sure that POLARIS_ACCESS_TOKEN # is defined in settings @@ -48,7 +48,7 @@ jobs: wget -q ${{ secrets.POLARIS_SERVER_URL}}/api/tools/polaris_cli-linux64.zip unzip -j polaris_cli-linux64.zip -d /tmp /tmp/polaris analyze -w - + # Please note that the ID in previous step was set to prescription # in order for this logic to work - name: Software Composition Analysis with Black Duck @@ -56,7 +56,7 @@ jobs: uses: blackducksoftware/github-action@9ea442b34409737f64743781e9adc71fd8e17d38 with: args: '--blackduck.url="${{ secrets.BLACKDUCK_URL}}" --blackduck.api.token="${{ secrets.BLACKDUCK_TOKEN}}" --detect.tools="SIGNATURE_SCAN,DETECTOR"' - + - name: Synopsys Intelligent Security Scan if: ${{ steps.prescription.outputs.sastScan == 'true' || steps.prescription.outputs.scaScan == 'true' }} uses: synopsys-sig/intelligent-security-scan@48eedfcd42bc342a294dc495ac452797b2d9ff08 @@ -64,11 +64,11 @@ jobs: ioServerUrl: ${{secrets.IO_SERVER_URL}} ioServerToken: ${{secrets.IO_SERVER_TOKEN}} workflowServerUrl: ${{secrets.WORKFLOW_SERVER_URL}} - 
additionalWorkflowArgs: --IS_SAST_ENABLED=${{steps.prescription.outputs.sastScan}} --IS_SCA_ENABLED=${{steps.prescription.outputs.scaScan}} - --polaris.project.name={{PROJECT_NAME}} --polaris.url=${{secrets.POLARIS_SERVER_URL}} --polaris.token=${{secrets.POLARIS_ACCESS_TOKEN}} + additionalWorkflowArgs: --IS_SAST_ENABLED=${{steps.prescription.outputs.sastScan}} --IS_SCA_ENABLED=${{steps.prescription.outputs.scaScan}} + --polaris.project.name={{PROJECT_NAME}} --polaris.url=${{secrets.POLARIS_SERVER_URL}} --polaris.token=${{secrets.POLARIS_ACCESS_TOKEN}} --blackduck.project.name={{PROJECT_NAME}}:{{PROJECT_VERSION}} --blackduck.url=${{secrets.BLACKDUCK_URL}} --blackduck.api.token=${{secrets.BLACKDUCK_TOKEN}} stage: "WORKFLOW" - + - name: Upload SARIF file if: ${{steps.prescription.outputs.sastScan == 'true' }} uses: github/codeql-action/upload-sarif@v2 diff --git a/.github/workflows/sysdig-scan.yml b/.github/workflows/sysdig-scan.yml index 0a628f9b..568b8a9d 100644 --- a/.github/workflows/sysdig-scan.yml +++ b/.github/workflows/sysdig-scan.yml @@ -38,7 +38,7 @@ jobs: id: scan uses: sysdiglabs/scan-action@768d7626a14897e0948ea89c8437dd46a814b163 with: - # Tag of the image to analyse. + # Tag of the image to analyse. # Change ${{ github.repository }} variable by another image name if you want but don't forget changing also image-tag above image-tag: ${{ github.repository }}:latest # API token for Sysdig Scanning auth @@ -46,7 +46,7 @@ jobs: # Sysdig secure endpoint. Please read: https://docs.sysdig.com/en/docs/administration/saas-regions-and-ip-ranges/ # US-East https://secure.sysdig.com # US-West https://us2.app.sysdig.com - # EU https://eu1.app.sysdig.com + # EU https://eu1.app.sysdig.com sysdig-secure-url: https://us2.app.sysdig.com dockerfile-path: ./Dockerfile input-type: docker-daemon diff --git a/.github/workflows/tfsec.yml b/.github/workflows/tfsec.yml index d81c0e70..3f55eba6 100644 --- a/.github/workflows/tfsec.yml +++ b/.github/workflows/tfsec.yml @@ -9,7 +9,7 @@ on: push: branches: [ "main" ] pull_request: - branches: [ "main" ] + branches: [ "main" ] schedule: - cron: '28 1 * * 6' @@ -29,10 +29,10 @@ jobs: - name: Run tfsec uses: tfsec/tfsec-sarif-action@9a83b5c3524f825c020e356335855741fd02745f with: - sarif_file: tfsec.sarif + sarif_file: tfsec.sarif - name: Upload SARIF file uses: github/codeql-action/upload-sarif@v2 with: # Path to SARIF file relative to the root of the repository - sarif_file: tfsec.sarif + sarif_file: tfsec.sarif diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..18725306 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,87 @@ +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + # Checks for files that contain merge conflict strings. + - id: check-merge-conflict + # Detects AWS credentials from the AWS CLI credentials file. + - id: detect-aws-credentials + args: [--allow-missing-credentials] + # Detects the presence of private keys. + - id: detect-private-key + # Trims trailing whitespace in the codebase. + - id: trailing-whitespace + # Prevents direct commits to the main branch. + - id: no-commit-to-branch + args: [--branch, main] + + +# Checks whether the commit is signed off using `--signoff/-s` +- repo: https://github.com/KAUTH/pre-commit-git-checks + rev: v0.0.1 # Use the SHA or tag you want to point to + hooks: + - id: git-signoff + stages: [commit-msg] + +# Checks your git commit messages for style.
+- repo: https://github.com/jorisroovers/gitlint + rev: v0.19.1 + hooks: + - id: gitlint + name: Scan Commit messages +# Detects hardcoded secrets, security vulnerabilities and policy breaks using Gitleaks +- repo: https://github.com/zricethezav/gitleaks + rev: v8.18.1 + hooks: + - id: gitleaks + name: Detect hardcoded secrets + description: Detect hardcoded secrets using Gitleaks + entry: gitleaks protect --verbose --redact --staged + language: golang + pass_filenames: false + +- repo: https://github.com/Bahjat/pre-commit-golang + rev: v1.0.3 + hooks: + # Formats Go code + # - id: gofumpt # requires gofumpt to be installed from github.com/mvdan/gofumpt + # name: Go formatter + # description: Runs a strict Go formatter + - id: go-fmt-import + name: Go formatter + description: Go formatter with fmt and imports + # Runs unit tests + - id: go-unit-tests + name: Run Unit tests + description: Runs all the unit tests in the repo + # Runs static analysis of the Go code + - id: go-static-check + name: Go Static Check + description: Finds bugs and performance issues + +# Local hooks + +- repo: https://github.com/intelops/gitrepos-templates-policies + rev: v0.0.1 + hooks: + - id: check-devcontainer + name: Check devcontainer + description: Checks for existence of .devcontainer.json in the project + - id: check-dockerfile + name: Check Dockerfile + description: Enforce use of Chainguard base images in Dockerfiles + - id: check-gitsign + name: Check gitsign + description: Check if the last commit is signed with Sigstore gitsign + - id: check-multistage-dockerfile + name: Check multi-stage Dockerfile + description: Check the existence of a Dockerfile in the project and verify that it is a multi-stage Dockerfile + +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-yaml + name: Verify YAML syntax + args: + - --allow-multiple-documents diff --git a/README.md b/README.md index f9ce3923..4271ad00 100644 --- a/README.md +++ b/README.md @@ -40,7 +40,7 @@ Visualize Kubernetes & DevSecOps Workflows. Tracks changes/events real-time acro ## How KubViz works -KubViz client can be installed on any Kubernetes cluster. KubViz agent runs in a kubernetes cluster where the changes/events need to be tracked. The agent detects the changes in real time and send those events via NATS JetStream and the same is received in the KubViz client. +KubViz client can be installed on any Kubernetes cluster. KubViz agent runs in a kubernetes cluster where the changes/events need to be tracked. The agent detects the changes in real time and send those events via NATS JetStream and the same is received in the KubViz client. KubViz client receives the events and passes it to Clickhouse database. The events present in the Clickhouse database can be visualized through Grafana. @@ -59,7 +59,7 @@ It comprehensively scans Kubernetes containers for security flaws, such as vulne ## How to install and run Kubviz #### Prerequisites -* A Kubernetes cluster +* A Kubernetes cluster * Helm binary #### Prepare Namespace @@ -87,10 +87,10 @@ token=$(openssl rand -base64 32 | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1) helm upgrade -i kubviz-client kubviz/client -n kubviz --set "nats.auth.token=$token" ``` -**NOTE:** +**NOTE:** - If you want to get a token from a secret, use a secret reference with the secret's name and key. -**NOTE:** +**NOTE:** - If you want to enable Grafana with the client deployment, add `--set grafana.enabled=true` to the helm upgrade command.
- Kubviz provides a setup for Grafana with Postgres data persistence, ensuring that even if the grafana pod/service goes down, the data will persist, safeguarding crucial information for visualization and analysis. @@ -99,7 +99,7 @@ helm upgrade -i kubviz-client kubviz/client -n kubviz --set "nats.auth.token=$to helm upgrade -i kubviz-client kubviz/client -n kubviz --set "nats.auth.token=$token" --set grafana.enabled=true --set grafana.postgresql=true ``` -- If grafana already exist use the same upgrade command without --set grafana.enabled=true flag. +- If grafana already exist use the same upgrade command without --set grafana.enabled=true flag. ```bash helm upgrade -i kubviz-client kubviz/client -n kubviz --set "nats.auth.token=$token" --set grafana.enabled=true @@ -130,12 +130,12 @@ kubectl get services kubviz-client-nats-external -n kubviz --output jsonpath='{. helm upgrade -i kubviz-agent kubviz/agent -n kubviz \ --set "nats.auth.token=$token" \ --set git_bridge.enabled=true \ - --set "git_bridge.ingress.hosts[0].host=",git_bridge.ingress.hosts[0].paths[0].path=/,git_bridge.ingress.hosts[0].paths[0].pathType=Prefix,git_bridge.ingress.tls[0].secretName=,git_bridge.ingress.tls[0].hosts[0]= \ + --set "git_bridge.ingress.hosts[0].host=",git_bridge.ingress.hosts[0].paths[0].path=/,git_bridge.ingress.hosts[0].paths[0].pathType=Prefix,git_bridge.ingress.tls[0].secretName=,git_bridge.ingress.tls[0].hosts[0]= \ --set container_bridge.enabled=true \ --set "container_bridge.ingress.hosts[0].host=",container_bridge.ingress.hosts[0].paths[0].path=/,container_bridge.ingress.hosts[0].paths[0].pathType=Prefix,container_bridge.ingress.tls[0].secretName=,container_bridge.ingress.tls[0].hosts[0]= ``` -**NOTE:** +**NOTE:** If you want to get a token from a secret, use a secret reference with the secret's name and key. 3. Replace "INGRESS HOSTNAME" with the desired hostname for the Git Bridge and Container Bridge Ingress configurations. @@ -155,7 +155,7 @@ Parameter | Description | Default `git_bridge.ingress.tls` | git_bridge ingress tls configuration | [] `container_bridge.ingress.tls` | container_bridge ingress tls configuration | [] -**NOTE:** +**NOTE:** - Default Annotations for Ingress @@ -180,11 +180,11 @@ helm upgrade -i kubviz-agent kubviz/agent -f values.yaml -n kubviz 1. Run the following command to deploy the KubViz agent: ```bash -helm upgrade -i kubviz-agent kubviz/agent -n kubviz --set nats.host= --set "nats.auth.token=$token" +helm upgrade -i kubviz-agent kubviz/agent -n kubviz --set nats.host= --set "nats.auth.token=$token" ``` 2. Replace "" with the IP address of your NATS service **kubviz-client-nats-external**. -**NOTE:** +**NOTE:** The time-based job scheduler is added for each plugin, allowing you to schedule and automate the execution of plugins at specific times or intervals. To activate this scheduler, set 'enabled' to 'true.' Once enabled, each plugin's execution can be configured to run at a precise time or at regular intervals, based on the provided settings. Additionally, if you set the 'schedulingInterval' to '0', it will disable the plugins. @@ -217,7 +217,7 @@ kubectl get secret --namespace kubviz kubviz-client-grafana -o jsonpath="{.data. 
```bash export POD_NAME=$(kubectl get pods --namespace kubviz -l "app.kubernetes.io/name=grafana,app.kubernetes.io/instance=kubviz-client" -o jsonpath="{.items[0].metadata.name}") ``` -```bash +```bash kubectl --namespace kubviz port-forward $POD_NAME 3000 ``` @@ -243,7 +243,7 @@ To guide you through the process of setting up a TTL, [please follow these steps Use KubViz to monitor your cluster events, including: -- State changes +- State changes - Errors - Other messages that occur in the cluster diff --git a/agent/container/main.go b/agent/container/main.go index 56b6958a..72f76654 100755 --- a/agent/container/main.go +++ b/agent/container/main.go @@ -23,7 +23,7 @@ func main() { log.Printf("Error shutting down tracer provider: %v", err) } }() - + app := application.New() go app.GithubContainerWatch() go app.Start() diff --git a/agent/container/openapi.yaml b/agent/container/openapi.yaml index de7b8eb2..c3c3829d 100755 --- a/agent/container/openapi.yaml +++ b/agent/container/openapi.yaml @@ -59,6 +59,6 @@ paths: summary: Post Jfrog Container Registry webhook events responses: '200': - description: OK + description: OK # oapi-codegen -config ./cfg.yaml ./openapi.yaml diff --git a/agent/container/pkg/application/application.go b/agent/container/pkg/application/application.go index 7cabbbcd..6d81a970 100755 --- a/agent/container/pkg/application/application.go +++ b/agent/container/pkg/application/application.go @@ -52,7 +52,7 @@ func New() *Application { } r.Use(otelgin.Middleware(config.ServiceName)) - + apiServer.BindRequest(r) httpServer := &http.Server{ diff --git a/agent/container/pkg/application/handlers.go b/agent/container/pkg/application/handlers.go index cca3a95d..6d94f33c 100755 --- a/agent/container/pkg/application/handlers.go +++ b/agent/container/pkg/application/handlers.go @@ -20,7 +20,7 @@ func (app *Application) localRegistryHandler(w http.ResponseWriter, r *http.Requ _, span := tracer.Start(opentelemetry.BuildContext(ctx), "localRegistryHandler") span.SetAttributes(attribute.String("http.method", "POST")) defer span.End() - + event, err := io.ReadAll(r.Body) if err != nil { log.Printf("Event body read failed: %v", err) diff --git a/agent/container/pkg/handler/api_handler.go b/agent/container/pkg/handler/api_handler.go index 0e6bdd01..6efee78d 100755 --- a/agent/container/pkg/handler/api_handler.go +++ b/agent/container/pkg/handler/api_handler.go @@ -39,7 +39,7 @@ func (ah *APIHandler) BindRequest(r *gin.Engine) { } r.Use(otelgin.Middleware(config.ServiceName)) - + apiGroup := r.Group("/") { apiGroup.GET("/api-docs", ah.GetApiDocs) diff --git a/agent/container/pkg/handler/azure_container.go b/agent/container/pkg/handler/azure_container.go index 5e881703..35a72f3f 100644 --- a/agent/container/pkg/handler/azure_container.go +++ b/agent/container/pkg/handler/azure_container.go @@ -26,7 +26,7 @@ func (ah *APIHandler) PostEventAzureContainer(c *gin.Context) { _, span := tracer.Start(c.Request.Context(), "PostEventAzureContainer") span.SetAttributes(attribute.String("http.method", "POST")) defer span.End() - + defer func() { _, _ = io.Copy(io.Discard, c.Request.Body) _ = c.Request.Body.Close() diff --git a/agent/container/pkg/handler/docker_event_dockerhub.go b/agent/container/pkg/handler/docker_event_dockerhub.go index 9066c947..f74bd8ae 100644 --- a/agent/container/pkg/handler/docker_event_dockerhub.go +++ b/agent/container/pkg/handler/docker_event_dockerhub.go @@ -23,7 +23,7 @@ func (ah *APIHandler) PostEventDockerHub(c *gin.Context) { _, span := 
tracer.Start(c.Request.Context(), "PostEventDockerHub") span.SetAttributes(attribute.String("http.method", "POST")) defer span.End() - + defer func() { _, _ = io.Copy(io.Discard, c.Request.Body) _ = c.Request.Body.Close() diff --git a/agent/container/pkg/handler/jfrog_container.go b/agent/container/pkg/handler/jfrog_container.go index 77b0451f..8d57f272 100644 --- a/agent/container/pkg/handler/jfrog_container.go +++ b/agent/container/pkg/handler/jfrog_container.go @@ -21,7 +21,7 @@ func (ah *APIHandler) PostEventJfrogContainer(c *gin.Context) { _, span := tracer.Start(c.Request.Context(), "PostEventJfrogContainer") span.SetAttributes(attribute.String("http.method", "POST")) defer span.End() - + defer func() { _, _ = io.Copy(io.Discard, c.Request.Body) _ = c.Request.Body.Close() diff --git a/agent/container/pkg/handler/quay_handler.go b/agent/container/pkg/handler/quay_handler.go index b675658f..b1a2be84 100644 --- a/agent/container/pkg/handler/quay_handler.go +++ b/agent/container/pkg/handler/quay_handler.go @@ -18,7 +18,7 @@ func (ah *APIHandler) PostEventQuayContainer(c *gin.Context) { _, span := tracer.Start(c.Request.Context(), "PostEventQuayContainer") span.SetAttributes(attribute.String("http.method", "POST")) defer span.End() - + defer func() { _, _ = io.Copy(io.Discard, c.Request.Body) _ = c.Request.Body.Close() diff --git a/agent/git/pkg/application/application.go b/agent/git/pkg/application/application.go index 0ea3b17e..f8bd908d 100644 --- a/agent/git/pkg/application/application.go +++ b/agent/git/pkg/application/application.go @@ -50,7 +50,7 @@ func (app *Application) Routes() *gin.Engine { } router.Use(otelgin.Middleware(config.ServiceName)) - + api.RegisterHandlers(router, app) return router } diff --git a/agent/git/pkg/application/handlers.go b/agent/git/pkg/application/handlers.go index cb2c7cfe..fef778f5 100644 --- a/agent/git/pkg/application/handlers.go +++ b/agent/git/pkg/application/handlers.go @@ -20,7 +20,7 @@ func (app *Application) PostGitea(c *gin.Context) { _, span := tracer.Start(c.Request.Context(), "PostGitea") span.SetAttributes(attribute.String("http.method", "POST")) defer span.End() - + defer log.Println("gitea handler exited...") event := c.Request.Header.Get(string(model.GiteaHeader)) @@ -79,7 +79,7 @@ func (app *Application) PostGithub(c *gin.Context) { defer span.End() defer log.Println("github handler exited...") - + event := c.Request.Header.Get(string(model.GithubHeader)) if len(event) == 0 { log.Println("error getting the github event from header") @@ -131,7 +131,7 @@ func (app *Application) PostBitbucket(c *gin.Context) { _, span := tracer.Start(c.Request.Context(), "PostBitbucket") span.SetAttributes(attribute.String("http.method", "POST")) defer span.End() - + defer log.Println("bitbucket handler exited...") event := c.Request.Header.Get(string(model.BitBucketHeader)) diff --git a/agent/server/server.go b/agent/server/server.go index 70eb3b3e..db5f910a 100644 --- a/agent/server/server.go +++ b/agent/server/server.go @@ -41,7 +41,7 @@ func StartServer() { } r.Use(otelgin.Middleware(config.ServiceName)) - + EnableProfile(r) log.Fatal(r.Run(":8080")) } diff --git a/charts/client/Chart.yaml b/charts/client/Chart.yaml index b38b2315..e1cb82ad 100644 --- a/charts/client/Chart.yaml +++ b/charts/client/Chart.yaml @@ -35,4 +35,4 @@ dependencies: condition: grafana.enabled version: 1.0.5 repository: https://kube-tarian.github.io/helmrepo-supporting-tools/ - + diff --git a/charts/client/templates/configmap-vertamedia-datasource.yaml 
b/charts/client/templates/configmap-vertamedia-datasource.yaml index 627f1dc6..e50b83e3 100644 --- a/charts/client/templates/configmap-vertamedia-datasource.yaml +++ b/charts/client/templates/configmap-vertamedia-datasource.yaml @@ -17,11 +17,11 @@ data: basicAuth: true basicAuthUser: {{ .Values.clickhouse.user }} secureJsonData: - basicAuthPassword: {{ .Values.clickhouse.password }} + basicAuthPassword: {{ .Values.clickhouse.password }} {{- else }} url: {{ .Values.existingClickhouse.host }}:8123 access: proxy - basicAuth: true + basicAuth: true {{- if not .Values.existingClickhouse.secret }} basicAuthUser: {{ .Values.existingClickhouse.username }} {{- else }} diff --git a/charts/client/values.yaml b/charts/client/values.yaml index a6caddf7..269aa52a 100644 --- a/charts/client/values.yaml +++ b/charts/client/values.yaml @@ -127,7 +127,7 @@ grafana: allowUiUpdates: true postgresql: enabled: false - database: + database: type: postgres host: kubviz-client-postgresql:5432 name: postgres @@ -176,7 +176,7 @@ opentelemetry: isEnabled: false url: "otelcollector.local" appName: "kubviz" - + consumer: ketallconsumer: "KETALL_EVENTS_CONSUMER" rakeesconsumer: "RAKEES_METRICS_CONSUMER" diff --git a/client/main.go b/client/main.go index af2316db..842de37f 100644 --- a/client/main.go +++ b/client/main.go @@ -24,7 +24,7 @@ func main() { log.Printf("Error shutting down tracer provider: %v", err) } }() - + app := application.Start() signals := make(chan os.Signal, 1) diff --git a/client/pkg/clients/bridge_client.go b/client/pkg/clients/bridge_client.go index 999cc07a..9b721805 100644 --- a/client/pkg/clients/bridge_client.go +++ b/client/pkg/clients/bridge_client.go @@ -46,7 +46,7 @@ func (n *NATSContext) SubscribeGitBridgeNats(conn clickhouse.DBInterface) { _, span := tracer.Start(opentelemetry.BuildContext(ctx), "SubscribeGitBridgeNats") span.SetAttributes(attribute.String("git-subscribe", "Subscribe")) defer span.End() - + n.stream.Subscribe(string(bridgeSubject), func(msg *nats.Msg) { msg.Ack() gitprovider := msg.Header.Get("GitProvider") diff --git a/client/pkg/clients/container_client.go b/client/pkg/clients/container_client.go index cea17181..ff571614 100644 --- a/client/pkg/clients/container_client.go +++ b/client/pkg/clients/container_client.go @@ -36,7 +36,7 @@ func (n *NATSContext) SubscribeContainerNats(conn clickhouse.DBInterface) { _, span := tracer.Start(opentelemetry.BuildContext(ctx), "SubscribeContainerNats") span.SetAttributes(attribute.String("container-subscribe", "Subscribe")) defer span.End() - + n.stream.Subscribe(string(containerSubject), func(msg *nats.Msg) { msg.Ack() repoName := msg.Header.Get("REPO_NAME") diff --git a/docs/CONFIGURATION.md b/docs/CONFIGURATION.md index cb89631a..00d8c796 100644 --- a/docs/CONFIGURATION.md +++ b/docs/CONFIGURATION.md @@ -14,11 +14,11 @@ Please replace the section with the specific ingress host nam Possible values are: Values | Platform | ------- | -------- | +------ | -------- | `/github` | GitHub | `/gitlab` | GitLab | `/gitea` | Gitea | -`/bitbucket` | BitBucket | +`/bitbucket` | BitBucket | `/azure` | Azure | 2. 
The URL for a Container Registry will appear in the following format: @@ -32,7 +32,7 @@ Please replace the section with the specific ingress host nam Possible values are: Values | Platform | ------- | -------- | +------ | -------- | `/event/docker/hub` | DockerHub | `/event/azure/container` | Azure | `/event/jfrog/container` | JFrog | diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index 76918c21..20425d2e 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -28,7 +28,7 @@ You are more than welcome to open issues in this project to [suggest new feature This project is written in Golang. -You need +You need `Go 1.16+` diff --git a/sql/0000010_trivy_misconfig.up.sql b/sql/0000010_trivy_misconfig.up.sql index 978a9ff8..11767e76 100644 --- a/sql/0000010_trivy_misconfig.up.sql +++ b/sql/0000010_trivy_misconfig.up.sql @@ -17,7 +17,7 @@ CREATE TABLE IF NOT EXISTS trivy_misconfig ( EventTime DateTime('UTC'), ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/0000011_trivyimage.up.sql b/sql/0000011_trivyimage.up.sql index 3787fa59..acab7047 100644 --- a/sql/0000011_trivyimage.up.sql +++ b/sql/0000011_trivyimage.up.sql @@ -13,6 +13,6 @@ CREATE TABLE IF NOT EXISTS trivyimage ( vul_last_modified_date DateTime('UTC'), ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/0000012_dockerhubbuild.up.sql b/sql/0000012_dockerhubbuild.up.sql index a448994e..9050b61f 100644 --- a/sql/0000012_dockerhubbuild.up.sql +++ b/sql/0000012_dockerhubbuild.up.sql @@ -8,6 +8,6 @@ CREATE TABLE IF NOT EXISTS dockerhubbuild ( EventTime DateTime('UTC'), ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/0000013_azurecontainerpush.up.sql b/sql/0000013_azurecontainerpush.up.sql index 6dbb5aa2..e23c03a8 100644 --- a/sql/0000013_azurecontainerpush.up.sql +++ b/sql/0000013_azurecontainerpush.up.sql @@ -9,6 +9,6 @@ CREATE TABLE IF NOT EXISTS azurecontainerpush ( EventTime DateTime('UTC'), ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; \ No newline at end of file diff --git a/sql/0000014_quaycontainerpush.up.sql b/sql/0000014_quaycontainerpush.up.sql index 6304fcc7..1361d75d 100644 --- a/sql/0000014_quaycontainerpush.up.sql +++ b/sql/0000014_quaycontainerpush.up.sql @@ -9,6 +9,6 @@ CREATE TABLE IF NOT EXISTS quaycontainerpush ( EventTime DateTime('UTC'), ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/0000015_trivysbom.up.sql b/sql/0000015_trivysbom.up.sql index d93aa1bb..cab4007f 100644 --- a/sql/0000015_trivysbom.up.sql +++ b/sql/0000015_trivysbom.up.sql @@ -11,6 +11,6 @@ CREATE TABLE IF NOT EXISTS trivysbom ( other_component_name String, ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) 
ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/0000016_azure_devops.up.sql b/sql/0000016_azure_devops.up.sql index bc7752a8..040a25b1 100644 --- a/sql/0000016_azure_devops.up.sql +++ b/sql/0000016_azure_devops.up.sql @@ -9,6 +9,6 @@ CREATE TABLE IF NOT EXISTS azure_devops ( Event String, ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/0000017_github.up.sql b/sql/0000017_github.up.sql index 2f627dd5..b10088c6 100644 --- a/sql/0000017_github.up.sql +++ b/sql/0000017_github.up.sql @@ -9,6 +9,6 @@ CREATE TABLE IF NOT EXISTS github ( Event String, ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/0000018_gitlab.up.sql b/sql/0000018_gitlab.up.sql index d47dd988..3fde07fc 100644 --- a/sql/0000018_gitlab.up.sql +++ b/sql/0000018_gitlab.up.sql @@ -9,6 +9,6 @@ CREATE TABLE IF NOT EXISTS gitlab ( Event String, ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/0000019_bitbucket.up.sql b/sql/0000019_bitbucket.up.sql index 778e2ea7..62c2adb6 100644 --- a/sql/0000019_bitbucket.up.sql +++ b/sql/0000019_bitbucket.up.sql @@ -9,6 +9,6 @@ CREATE TABLE IF NOT EXISTS bitbucket ( Event String, ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/000001_events.up.sql b/sql/000001_events.up.sql index d165653f..8905854f 100644 --- a/sql/000001_events.up.sql +++ b/sql/000001_events.up.sql @@ -15,6 +15,6 @@ CREATE TABLE IF NOT EXISTS events ( LastTime String, ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/0000020_gitea.up.sql b/sql/0000020_gitea.up.sql index 3232bc56..6767a05b 100644 --- a/sql/0000020_gitea.up.sql +++ b/sql/0000020_gitea.up.sql @@ -9,6 +9,6 @@ CREATE TABLE IF NOT EXISTS gitea ( Event String, ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/000002_rakkess.up.sql b/sql/000002_rakkess.up.sql index 0bbd400a..d4b66dc2 100644 --- a/sql/000002_rakkess.up.sql +++ b/sql/000002_rakkess.up.sql @@ -8,7 +8,7 @@ CREATE TABLE IF NOT EXISTS rakkess ( EventTime DateTime('UTC'), ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/000003_DeprecatedAPIs.up.sql b/sql/000003_DeprecatedAPIs.up.sql index 46be7b93..c8aa1f76 100644 --- a/sql/000003_DeprecatedAPIs.up.sql +++ b/sql/000003_DeprecatedAPIs.up.sql @@ -8,7 +8,7 @@ CREATE TABLE IF NOT EXISTS DeprecatedAPIs ( EventTime DateTime('UTC'), ExpiryDate DateTime DEFAULT 
now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/000004_DeletedAPIs.up.sql b/sql/000004_DeletedAPIs.up.sql index d333b3a4..dbfe3360 100644 --- a/sql/000004_DeletedAPIs.up.sql +++ b/sql/000004_DeletedAPIs.up.sql @@ -10,6 +10,6 @@ CREATE TABLE IF NOT EXISTS DeletedAPIs ( EventTime DateTime('UTC'), ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/000005_jfrogcontainerpush.up.sql b/sql/000005_jfrogcontainerpush.up.sql index a3a98fa6..4f47f001 100644 --- a/sql/000005_jfrogcontainerpush.up.sql +++ b/sql/000005_jfrogcontainerpush.up.sql @@ -11,7 +11,7 @@ CREATE TABLE IF NOT EXISTS jfrogcontainerpush ( EventTime DateTime('UTC'), ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/000006_getall_resources.up.sql b/sql/000006_getall_resources.up.sql index 32168213..b77b322c 100644 --- a/sql/000006_getall_resources.up.sql +++ b/sql/000006_getall_resources.up.sql @@ -7,7 +7,7 @@ CREATE TABLE IF NOT EXISTS getall_resources ( EventTime DateTime('UTC'), ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/000007_outdated_images.up.sql b/sql/000007_outdated_images.up.sql index 9779aead..f89c6132 100644 --- a/sql/000007_outdated_images.up.sql +++ b/sql/000007_outdated_images.up.sql @@ -9,7 +9,7 @@ CREATE TABLE IF NOT EXISTS outdated_images ( EventTime DateTime('UTC'), ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/000008_kubescore.up.sql b/sql/000008_kubescore.up.sql index 2db441b1..db4086f1 100644 --- a/sql/000008_kubescore.up.sql +++ b/sql/000008_kubescore.up.sql @@ -15,7 +15,7 @@ CREATE TABLE IF NOT EXISTS kubescore ( EventTime DateTime('UTC'), ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate; diff --git a/sql/000009_trivy_vul.up.sql b/sql/000009_trivy_vul.up.sql index 2ebc670f..808a909f 100644 --- a/sql/000009_trivy_vul.up.sql +++ b/sql/000009_trivy_vul.up.sql @@ -17,7 +17,7 @@ CREATE TABLE IF NOT EXISTS trivy_vul ( vul_last_modified_date DateTime('UTC'), ExpiryDate DateTime DEFAULT now() + INTERVAL {{.TTLValue}} {{.TTLUnit}}, ExportedAt DateTime DEFAULT NULL -) ENGINE = MergeTree() -ORDER BY ExpiryDate +) ENGINE = MergeTree() +ORDER BY ExpiryDate TTL ExpiryDate;
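
As a usage sketch only (not part of the patch itself), the hooks introduced in the new `.pre-commit-config.yaml` above could be enabled and exercised locally roughly as follows, assuming Python with pip and the Go toolchain are already installed on the developer machine:

```bash
# Install the pre-commit framework (one-time, per developer machine)
pip install pre-commit

# Register the hooks defined in .pre-commit-config.yaml for the default
# pre-commit stage, and additionally for the commit-msg stage so that
# gitlint and git-signoff run on the commit message
pre-commit install
pre-commit install --hook-type commit-msg

# Optionally run every configured hook once against the whole repository
pre-commit run --all-files
```

Note that hooks such as gitleaks, go-fmt-import, go-unit-tests, and go-static-check pull in or rely on Go-based tooling, so the first run may take noticeably longer while the hook environments are built.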