diff --git a/.github/workflows/addReleaseLinks.yml b/.github/workflows/addReleaseLinks.yml index f9ff98486..75622164b 100644 --- a/.github/workflows/addReleaseLinks.yml +++ b/.github/workflows/addReleaseLinks.yml @@ -17,7 +17,7 @@ jobs: RELEASE_VERSION=$(echo "${{ github.event.release.tag_name }}" | sed 's/^v//') # Replace the place-holders '{version}' with the actual release version. - sed "s/{version}/$RELEASE_VERSION/g" ./.github/releaseLinkTemplate.md > ./temp_releaseLinkTemplate.md + sed "s/{version}/$RELEASE_VERSION/g" ./.github/RELEASE_LINK_TEMPLATE.md > ./temp_releaseLinkTemplate.md - name: Add links to release notes uses: softprops/action-gh-release@v1 diff --git a/.github/workflows/analysis.yml b/.github/workflows/analysis.yml index 1d9be52f3..ccc237c65 100644 --- a/.github/workflows/analysis.yml +++ b/.github/workflows/analysis.yml @@ -65,3 +65,15 @@ jobs: uses: securego/gosec@master with: args: -exclude G204,G301,G302,G304,G306 -tests -exclude-dir \.*test\.* ./... + + ShellCheck: + name: Shellcheck + runs-on: ubuntu-latest + steps: + - name: Checkout Source + uses: actions/checkout@v3 + + - name: Run ShellCheck + uses: ludeeus/action-shellcheck@master + with: + ignore_paths: '*test*' diff --git a/.github/workflows/dockerTests.yml b/.github/workflows/dockerTests.yml index 0c8c02535..1a30ece44 100644 --- a/.github/workflows/dockerTests.yml +++ b/.github/workflows/dockerTests.yml @@ -21,8 +21,7 @@ jobs: - name: Install Go uses: actions/setup-go@v3 with: - # We are temporarily downgrading to Go 1.20.5 due to a bug in version 1.20.6 that causes Docker tests to fail. 
- go-version: 1.20.5 + go-version: 1.20.x - name: Checkout code uses: actions/checkout@v3 with: diff --git a/.github/workflows/scriptTests.yml b/.github/workflows/scriptTests.yml index 7201edeeb..95c2cce63 100644 --- a/.github/workflows/scriptTests.yml +++ b/.github/workflows/scriptTests.yml @@ -12,25 +12,60 @@ concurrency: cancel-in-progress: true jobs: Scripts-tests: - name: Script tests (${{ matrix.os }}) + name: Script tests (${{ matrix.suite.os }}) defaults: run: shell: bash strategy: fail-fast: false matrix: - os: [ ubuntu, windows, macos ] - runs-on: ${{ matrix.os }}-latest + suite: + - os: "ubuntu" + + - os: "macos" + + - os: "windows" + osSuffix: ".exe" + runs-on: ${{ matrix.suite.os }}-latest steps: - name: Checkout code uses: actions/checkout@v3 with: ref: ${{ github.event.pull_request.head.sha }} + - name: Test install CLI - jf run: sh build/installcli/jf.sh && jf --version + - name: Test install CLI - jfrog run: sh build/installcli/jfrog.sh && jfrog --version + - name: Test get CLI - jf run: sh build/getcli/jf.sh && ./jf --version + - name: Test get CLI - jfrog run: sh build/getcli/jfrog.sh && ./jfrog --version + + - name: Test Build CLI - sh + run: | + sh build/build.sh + ./jf${{ matrix.suite.osSuffix }} --version + if: ${{ matrix.suite.os != 'windows' }} + + - name: Test Build CLI - bat + run: | + build/build.bat + ./jf${{ matrix.suite.osSuffix }} --version + if: ${{ matrix.suite.os == 'windows' }} + + - name: Test install npm - v2 + working-directory: build/npm/v2 + run: | + npm install + bin/jfrog${{ matrix.suite.osSuffix }} --version + + - name: Test install npm - v2-jf + working-directory: build/npm/v2-jf + run: | + npm install + bin/jf${{ matrix.suite.osSuffix }} --version + diff --git a/.github/workflows/xrayTests.yml b/.github/workflows/xrayTests.yml index a8f2df07d..afb659a97 100644 --- a/.github/workflows/xrayTests.yml +++ b/.github/workflows/xrayTests.yml @@ -23,8 +23,6 @@ jobs: runs-on: ${{ matrix.os }}-latest env: GRADLE_OPTS: 
-Dorg.gradle.daemon=false - # Run Xray tests with latest Analyzer - JFROG_CLI_ANALYZER_MANAGER_VERSION: "[RELEASE]" steps: - name: Install Go uses: actions/setup-go@v3 diff --git a/Jenkinsfile b/Jenkinsfile index 88627412b..f2d904873 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -25,7 +25,7 @@ node("docker") { repo = 'jfrog-cli' sh 'rm -rf temp' sh 'mkdir temp' - def goRoot = tool 'go-1.20.5' + def goRoot = tool 'go-1.20.8' env.GOROOT="$goRoot" env.PATH+=":${goRoot}/bin" env.GO111MODULE="on" diff --git a/artifactory_test.go b/artifactory_test.go index 1b2dd01ac..4330130a7 100644 --- a/artifactory_test.go +++ b/artifactory_test.go @@ -1,6 +1,7 @@ package main import ( + "archive/zip" "bytes" "crypto/tls" "encoding/csv" @@ -4166,6 +4167,38 @@ func TestUploadDeploymentViewWithArchive(t *testing.T) { cleanArtifactoryTest() } +func TestUploadZipAndCheckDeploymentViewWithArchive(t *testing.T) { + initArtifactoryTest(t, "") + + // Create tmp dir + assert.NoError(t, os.Mkdir(tests.Out, 0755)) + wd, err := os.Getwd() + assert.NoError(t, err) + defer cleanArtifactoryTest() + chdirCallback := clientTestUtils.ChangeDirWithCallback(t, wd, tests.Out) + defer chdirCallback() + + // Create file and a zip + fileName := "dummy_file.txt" + zipName := "test.zip" + assert.NoError(t, os.WriteFile(fileName, nil, 0644)) + + // Upload & download zip file + assert.NoError(t, artifactoryCli.Exec("upload", fileName, path.Join(tests.RtRepo1, zipName), "--archive", "zip")) + assert.NoError(t, artifactoryCli.Exec("download", path.Join(tests.RtRepo1, zipName))) + + // Check for time-zone offset for each file in the zip + r, err := zip.OpenReader(zipName) + assert.NoError(t, err) + defer func() { assert.NoError(t, r.Close()) }() + _, sysTimezoneOffset := time.Now().Zone() + for _, file := range r.File { + _, fileTimezoneOffset := file.Modified.Zone() + assert.Equal(t, sysTimezoneOffset, fileTimezoneOffset) + } + +} + func TestUploadDetailedSummary(t *testing.T) { initArtifactoryTest(t, "") 
uploadCmd := generic.NewUploadCommand() @@ -5601,7 +5634,7 @@ func testProjectInit(t *testing.T, projectExampleName string, technology coreuti // Copy a simple project in a temp work dir tmpWorkDir, deleteWorkDir := coretests.CreateTempDirWithCallbackAndAssert(t) defer deleteWorkDir() - testdataSrc := filepath.Join(filepath.FromSlash(tests.GetTestResourcesPath()), technology.ToString(), projectExampleName) + testdataSrc := filepath.Join(filepath.FromSlash(tests.GetTestResourcesPath()), technology.String(), projectExampleName) err = biutils.CopyDir(testdataSrc, tmpWorkDir, true, nil) assert.NoError(t, err) if technology == coreutils.Go { @@ -5619,7 +5652,7 @@ func testProjectInit(t *testing.T, projectExampleName string, technology coreuti err = platformCli.WithoutCredentials().Exec("project", "init", "--path", tmpWorkDir, "--server-id="+tests.ServerId) assert.NoError(t, err) // Validate correctness of .jfrog/projects/$technology.yml - validateProjectYamlFile(t, tmpWorkDir, technology.ToString()) + validateProjectYamlFile(t, tmpWorkDir, technology.String()) // Validate correctness of .jfrog/projects/build.yml validateBuildYamlFile(t, tmpWorkDir) } diff --git a/build/build.sh b/build/build.sh index ea46362c1..0cc3e14eb 100755 --- a/build/build.sh +++ b/build/build.sh @@ -8,5 +8,5 @@ if [ $# -eq 0 ] exe_name="$1" fi -CGO_ENABLED=0 go build -o $exe_name -ldflags '-w -extldflags "-static"' main.go +CGO_ENABLED=0 go build -o "$exe_name" -ldflags '-w -extldflags "-static"' main.go echo "The $exe_name executable was successfully created." 
diff --git a/build/chocolatey/v2-jf/README.md b/build/chocolatey/v2-jf/README.md index c85de6a4c..bc20431eb 100644 --- a/build/chocolatey/v2-jf/README.md +++ b/build/chocolatey/v2-jf/README.md @@ -24,13 +24,13 @@ choco pack version= ``` This will create the file _build/chocolatey/jfrog-cli.\.nupkg which can be -installed with Chcolatey +installed with Chocolatey ```powershell choco install jfrog-cli..nupkg ``` -See Chocolatey's official documenattion [here](https://chocolatey.org/docs/create-packages) +See Chocolatey's official documentation [here](https://chocolatey.org/docs/create-packages) [choco-dockerfile-pr]: https://github.com/chocolatey/choco/pull/1153 [choco-dockerfile]: https://github.com/chocolatey/choco/tree/master/docker diff --git a/build/chocolatey/v2/README.md b/build/chocolatey/v2/README.md index c85de6a4c..bc20431eb 100644 --- a/build/chocolatey/v2/README.md +++ b/build/chocolatey/v2/README.md @@ -24,13 +24,13 @@ choco pack version= ``` This will create the file _build/chocolatey/jfrog-cli.\.nupkg which can be -installed with Chcolatey +installed with Chocolatey ```powershell choco install jfrog-cli..nupkg ``` -See Chocolatey's official documenattion [here](https://chocolatey.org/docs/create-packages) +See Chocolatey's official documentation [here](https://chocolatey.org/docs/create-packages) [choco-dockerfile-pr]: https://github.com/chocolatey/choco/pull/1153 [choco-dockerfile]: https://github.com/chocolatey/choco/tree/master/docker diff --git a/build/deb_rpm/v2-jf/build-scripts/deb-install.sh b/build/deb_rpm/v2-jf/build-scripts/deb-install.sh index 6e4651be5..d3e1cd727 100644 --- a/build/deb_rpm/v2-jf/build-scripts/deb-install.sh +++ b/build/deb_rpm/v2-jf/build-scripts/deb-install.sh @@ -1,3 +1,5 @@ +#!/bin/bash + wget -qO - https://releases.jfrog.io/artifactory/api/gpg/key/public | apt-key add -; echo "deb https://releases.jfrog.io/artifactory/jfrog-debs xenial contrib" | sudo tee -a /etc/apt/sources.list; apt update; diff --git 
a/build/deb_rpm/v2-jf/build-scripts/pack.sh b/build/deb_rpm/v2-jf/build-scripts/pack.sh index 6d258a119..7a02590aa 100755 --- a/build/deb_rpm/v2-jf/build-scripts/pack.sh +++ b/build/deb_rpm/v2-jf/build-scripts/pack.sh @@ -46,12 +46,11 @@ errorExit() { } checkDockerAccess() { - log "Checking docker" "DEBUG" - docker -v > /dev/null 2>&1 && docker ps > /dev/null 2>&1 - if [ $? -ne 0 ]; then - errorExit "Must run as user that can execute docker commands" + if docker -v > /dev/null 2>&1 && docker ps > /dev/null 2>&1; then + log "Docker is available" "DEBUG" + else + errorExit "Must run as a user that can execute docker commands" fi - log "Docker is avaiable" "DEBUG" } exitWithUsage(){ @@ -64,7 +63,7 @@ createDEBPackage(){ local flavour="deb" # cleanup old files and containers - rm -f ${JFROG_CLI_PKG}/${JFROG_CLI_PREFIX}*${VERSION_FORMATTED}*.${flavour} + rm -f "${JFROG_CLI_PKG}/${JFROG_CLI_PREFIX}*${VERSION_FORMATTED}*.${flavour}" docker rm -f "${RPM_BUILDER_NAME}" 2>/dev/null log "Building ${JFROG_CLI_PREFIX} ${flavour} ${JFROG_CLI_VERSION} on ${DEB_BUILD_IMAGE} image" @@ -72,7 +71,7 @@ createDEBPackage(){ docker run -t --rm -v "${JFROG_CLI_HOME}/${flavour}":${DEB_IMAGE_ROOT_DIR}/src \ -v "${JFROG_CLI_PKG}":${DEB_IMAGE_ROOT_DIR}/pkg \ --name ${DEB_BUILDER_NAME} \ - ${DEB_BUILD_IMAGE} bash -c "\ + "${DEB_BUILD_IMAGE}" bash -c "\ \ echo '' && echo '' && \ apt-get update && \ @@ -107,7 +106,7 @@ createRPMPackage(){ local flavour="rpm" # cleanup old files and containers - rm -f ${JFROG_CLI_PKG}/${JFROG_CLI_PREFIX}*${VERSION_FORMATTED}*.${flavour} + rm -f "${JFROG_CLI_PKG}/${JFROG_CLI_PREFIX}*${VERSION_FORMATTED}*.${flavour}" docker rm -f "${RPM_BUILDER_NAME}" 2>/dev/null log "Building ${JFROG_CLI_PREFIX} ${flavour} ${JFROG_CLI_VERSION} on ${RPM_BUILD_IMAGE} image" @@ -115,7 +114,7 @@ createRPMPackage(){ docker run -t --rm -v "${JFROG_CLI_HOME}/${flavour}":${RPM_IMAGE_ROOT_DIR}/src \ -v "${JFROG_CLI_PKG}":${RPM_IMAGE_ROOT_DIR}/pkg \ --name ${RPM_BUILDER_NAME} \ - 
${RPM_BUILD_IMAGE} bash -c "\ + "${RPM_BUILD_IMAGE}" bash -c "\ echo '' && echo '' && \ yum install -y ${RPM_DEPS} && \ echo '' && echo '' && \ @@ -159,10 +158,10 @@ rpmSign()( if [[ -f "${filePath}" && -f "${gpgFileInHost}" ]]; then log ""; log ""; log "Initiating rpm sign on ${filePath}..." - docker run --rm --name cli-rpm-sign -v "${filePath}":${filePathInImage} \ + docker run --rm --name cli-rpm-sign -v "${filePath}:${filePathInImage}" \ -v "${gpgFileInHost}":"${gpgFileInImage}" \ -v "${JFROG_CLI_HOME}/build-scripts":${RPM_IMAGE_ROOT_DIR}/src \ - ${RPM_SIGN_IMAGE} \ + "${RPM_SIGN_IMAGE}" \ bash -c "yum install -y expect rpm-sign pinentry && \ ${RPM_IMAGE_ROOT_DIR}/src/${rpmSignScript} \"${gpgFileInImage}\" \"${keYID}\" \"${passphrase}\" \"${filePathInImage}\" \ && exit 0 || exit 1" \ @@ -178,7 +177,7 @@ rpmSign()( runTests()( local flavour=$1 - [ ! -z "${flavour}" ] || { echo "Flavour is mandatory to run tests"; exit 1; } + [ -n "${flavour}" ] || { echo "Flavour is mandatory to run tests"; exit 1; } local fileName="${JFROG_CLI_PREFIX}-${VERSION_FORMATTED}.${flavour}" local filePath="${JFROG_CLI_PKG}"/${fileName} @@ -199,7 +198,7 @@ runTests()( if [ -f "${filePath}" ]; then log ""; log ""; log "Testing ${filePath} on ${testImage}..." - docker run --rm --name cli-test -v "${filePath}":${filePathInImage} ${testImage} \ + docker run --rm --name cli-test -v "${filePath}:${filePathInImage}" "${testImage}" \ bash -c "${installCommand} && jf -version | grep ${JFROG_CLI_VERSION} && \ ${signatureTestCommand} && exit 0 || exit 1" \ || { echo "ERROR: ############### Test failed! ###################"; exit 1; } @@ -214,28 +213,28 @@ runTests()( getArch(){ local image=$1 - [ ! -z "$image" ] || return 0; + [ -n "$image" ] || return 0; - docker run --rm ${image} bash -c "uname -m 2>/dev/null" 2>/dev/null + docker run --rm "${image}" bash -c "uname -m 2>/dev/null" 2>/dev/null } createPackage(){ local flavour=$1 - [ ! 
-z "${flavour}" ] || errorExit "Flavour is not passed to createPackage method" + [ -n "${flavour}" ] || errorExit "Flavour is not passed to createPackage method" - cp -f "${JFROG_CLI_BINARY}" "${JFROG_CLI_HOME}"/${flavour}/jf \ + cp -f "${JFROG_CLI_BINARY}" "${JFROG_CLI_HOME}"/"${flavour}"/jf \ || errorExit "Failed to copy ${JFROG_CLI_BINARY} to ${JFROG_CLI_HOME}/${flavour}/jf" case "$flavour" in rpm) - [ ! -z "${JFROG_CLI_RPM_ARCH}" ] || JFROG_CLI_RPM_ARCH=$(getArch "${RPM_BUILD_IMAGE}") + [ -n "${JFROG_CLI_RPM_ARCH}" ] || JFROG_CLI_RPM_ARCH=$(getArch "${RPM_BUILD_IMAGE}") VERSION_FORMATTED="${JFROG_CLI_VERSION}.${JFROG_CLI_RPM_ARCH}" createRPMPackage ;; deb) - [ ! -z "${JFROG_CLI_DEB_ARCH}" ] || JFROG_CLI_DEB_ARCH=$(getArch "${DEB_BUILD_IMAGE}") + [ -n "${JFROG_CLI_DEB_ARCH}" ] || JFROG_CLI_DEB_ARCH=$(getArch "${DEB_BUILD_IMAGE}") VERSION_FORMATTED="${JFROG_CLI_VERSION}.${JFROG_CLI_DEB_ARCH}" createDEBPackage ;; @@ -248,7 +247,7 @@ createPackage(){ setBuildImage(){ local arch="$1" - [ ! -z "${arch}" ] || errorExit "Architecture is not passed to setBuildImage method" + [ -n "${arch}" ] || errorExit "Architecture is not passed to setBuildImage method" case "$1" in x86_64) @@ -262,7 +261,7 @@ setBuildImage(){ } main(){ - while [[ $# > 0 ]]; do + while [[ $# -gt 0 ]]; do case "$1" in -f | --flavour) flavours="$2" @@ -323,21 +322,21 @@ main(){ esac done - : ${flavours:="rpm deb"} - : ${JFROG_CLI_RUN_TEST:="false"} - : ${RPM_BUILD_IMAGE:="centos:8"} - : ${RPM_SIGN_IMAGE:="centos:7"} - : ${DEB_BUILD_IMAGE:="ubuntu:16.04"} - : ${DEB_TEST_IMAGE:="${DEB_BUILD_IMAGE}"} - : ${RPM_TEST_IMAGE:="${RPM_BUILD_IMAGE}"} - : ${JFROG_CLI_RELEASE_VERSION:="1"} - : ${RPM_SIGN_PASSPHRASE:="$(cat $RPM_SIGN_PASSPHRASE_FILE)"} - : ${RPM_SIGN_KEY_ID:="JFrog Inc."} - : ${RPM_SIGN_KEY_NAME:="RPM-GPG-KEY-jfrog-cli"} - - [ ! 
-z "${JFROG_CLI_BINARY}" ] || exitWithUsage "jfrog cli binary is not passed" + : "${flavours:="rpm deb"}" + : "${JFROG_CLI_RUN_TEST:="false"}" + : "${RPM_BUILD_IMAGE:="centos:8"}" + : "${RPM_SIGN_IMAGE:="centos:7"}" + : "${DEB_BUILD_IMAGE:="ubuntu:16.04"}" + : "${DEB_TEST_IMAGE:="${DEB_BUILD_IMAGE}"}" + : "${RPM_TEST_IMAGE:="${RPM_BUILD_IMAGE}"}" + : "${JFROG_CLI_RELEASE_VERSION:="1"}" + : "${RPM_SIGN_PASSPHRASE:=$(cat "$RPM_SIGN_PASSPHRASE_FILE")}" + : "${RPM_SIGN_KEY_ID:="JFrog Inc."}" + : "${RPM_SIGN_KEY_NAME:="RPM-GPG-KEY-jfrog-cli"}" + + [ -n "${JFROG_CLI_BINARY}" ] || exitWithUsage "jfrog cli binary is not passed" [ -f "$JFROG_CLI_BINARY" ] || exitWithUsage "jfrog cli binary is not available at $JFROG_CLI_BINARY" - [ ! -z "${JFROG_CLI_VERSION}" ] || exitWithUsage "version is not passed, pass the version to be built" + [ -n "${JFROG_CLI_VERSION}" ] || exitWithUsage "version is not passed, pass the version to be built" if [[ "$flavours" == *"rpm"* ]] && [[ -z "${RPM_SIGN_PASSPHRASE}" || "${RPM_SIGN_PASSPHRASE}" == "" ]]; then echo "ERROR: RPM_SIGN_PASSPHRASE environment variable is not set" @@ -351,8 +350,13 @@ main(){ for flavour in $flavours; do createPackage "$flavour" - [[ "${flavour}" == "rpm" ]] && rpmSign || true - [[ "${JFROG_CLI_RUN_TEST}" == "true" ]] && runTests "${flavour}" || true + if [[ "${flavour}" == "rpm" ]]; then + rpmSign + fi + + if [[ "${JFROG_CLI_RUN_TEST}" == "true" ]]; then + runTests "${flavour}" + fi done log "...and Done!" 
diff --git a/build/deb_rpm/v2-jf/build-scripts/rpm-install.sh b/build/deb_rpm/v2-jf/build-scripts/rpm-install.sh index 4b42e5864..6eaceadb5 100644 --- a/build/deb_rpm/v2-jf/build-scripts/rpm-install.sh +++ b/build/deb_rpm/v2-jf/build-scripts/rpm-install.sh @@ -1,7 +1,8 @@ -echo "[jfrog-cli]" > jfrog-cli.repo; -echo "name=jfrog-cli" >> jfrog-cli.repo; -echo "baseurl=https://releases.jfrog.io/artifactory/jfrog-rpms" >> jfrog-cli.repo; -echo "enabled=1" >> jfrog-cli.repo; -echo "gpgcheck=0" >> jfrog-cli.repo; -sudo mv jfrog-cli.repo /etc/yum.repos.d/; +#!/bin/bash + +echo -e "[jfrog-cli] +name=jfrog-cli +baseurl=https://releases.jfrog.io/artifactory/jfrog-rpms +enabled=1 +gpgcheck=0" | sudo tee /etc/yum.repos.d/jfrog-cli.repo >/dev/null yum install -y jfrog-cli-v2-jf; diff --git a/build/deb_rpm/v2-jf/build-scripts/rpm-sign.sh b/build/deb_rpm/v2-jf/build-scripts/rpm-sign.sh index 5a94ce9fe..f76d1d0c7 100755 --- a/build/deb_rpm/v2-jf/build-scripts/rpm-sign.sh +++ b/build/deb_rpm/v2-jf/build-scripts/rpm-sign.sh @@ -23,11 +23,14 @@ rpmInitSigning(){ log "Initializing rpm sign..." - gpg --allow-secret-key-import --import ${gpgKeyFile} && \ + gpg --allow-secret-key-import --import "${gpgKeyFile}" && \ gpg --export -a "${keyID}" > /tmp/tmpFile && \ - rpm --import /tmp/tmpFile && \ - rpm -q gpg-pubkey --qf '%{name}-%{version}-%{release} --> %{summary}\n' | grep "${keyID}" || \ - { echo "ERROR: RPM signature initialization failed!" >&2; exit 1; } + if rpm --import /tmp/tmpFile && rpm -q gpg-pubkey --qf '%{name}-%{version}-%{release} --> %{summary}\n' | grep "${keyID}"; then + echo "RPM signature initialization succeeded." + else + echo "ERROR: RPM signature initialization failed!" >&2 + exit 1 + fi rpmEditRpmMacro "${keyID}" || \ { echo "ERROR: Configuring rpm macro failed!" 
>&2; exit 1; } diff --git a/build/deb_rpm/v2/build-scripts/deb-install.sh b/build/deb_rpm/v2/build-scripts/deb-install.sh index 3540fc6ca..20253e4ff 100644 --- a/build/deb_rpm/v2/build-scripts/deb-install.sh +++ b/build/deb_rpm/v2/build-scripts/deb-install.sh @@ -1,3 +1,5 @@ +#!/bin/bash + wget -qO - https://releases.jfrog.io/artifactory/api/gpg/key/public | apt-key add -; echo "deb https://releases.jfrog.io/artifactory/jfrog-debs xenial contrib" | sudo tee -a /etc/apt/sources.list; apt update; diff --git a/build/deb_rpm/v2/build-scripts/pack.sh b/build/deb_rpm/v2/build-scripts/pack.sh index b47269c7a..90e904962 100755 --- a/build/deb_rpm/v2/build-scripts/pack.sh +++ b/build/deb_rpm/v2/build-scripts/pack.sh @@ -46,12 +46,11 @@ errorExit() { } checkDockerAccess() { - log "Checking docker" "DEBUG" - docker -v > /dev/null 2>&1 && docker ps > /dev/null 2>&1 - if [ $? -ne 0 ]; then - errorExit "Must run as user that can execute docker commands" - fi - log "Docker is avaiable" "DEBUG" +if docker -v > /dev/null 2>&1 && docker ps > /dev/null 2>&1; then + log "Docker is available" "DEBUG" +else + errorExit "Must run as a user that can execute docker commands" +fi } exitWithUsage(){ @@ -64,7 +63,7 @@ createDEBPackage(){ local flavour="deb" # cleanup old files and containers - rm -f ${JFROG_CLI_PKG}/${JFROG_CLI_PREFIX}*${VERSION_FORMATTED}*.${flavour} + rm -f "${JFROG_CLI_PKG}/${JFROG_CLI_PREFIX}*${VERSION_FORMATTED}*.${flavour}" docker rm -f "${RPM_BUILDER_NAME}" 2>/dev/null log "Building ${JFROG_CLI_PREFIX} ${flavour} ${JFROG_CLI_VERSION} on ${DEB_BUILD_IMAGE} image" @@ -72,7 +71,7 @@ createDEBPackage(){ docker run -t --rm -v "${JFROG_CLI_HOME}/${flavour}":${DEB_IMAGE_ROOT_DIR}/src \ -v "${JFROG_CLI_PKG}":${DEB_IMAGE_ROOT_DIR}/pkg \ --name ${DEB_BUILDER_NAME} \ - ${DEB_BUILD_IMAGE} bash -c "\ + "${DEB_BUILD_IMAGE}" bash -c "\ \ echo '' && echo '' && \ apt-get update && \ @@ -107,7 +106,7 @@ createRPMPackage(){ local flavour="rpm" # cleanup old files and containers - rm -f 
${JFROG_CLI_PKG}/${JFROG_CLI_PREFIX}*${VERSION_FORMATTED}*.${flavour} + rm -f "${JFROG_CLI_PKG}/${JFROG_CLI_PREFIX}*${VERSION_FORMATTED}*.${flavour}" docker rm -f "${RPM_BUILDER_NAME}" 2>/dev/null log "Building ${JFROG_CLI_PREFIX} ${flavour} ${JFROG_CLI_VERSION} on ${RPM_BUILD_IMAGE} image" @@ -115,7 +114,7 @@ createRPMPackage(){ docker run -t --rm -v "${JFROG_CLI_HOME}/${flavour}":${RPM_IMAGE_ROOT_DIR}/src \ -v "${JFROG_CLI_PKG}":${RPM_IMAGE_ROOT_DIR}/pkg \ --name ${RPM_BUILDER_NAME} \ - ${RPM_BUILD_IMAGE} bash -c "\ + "${RPM_BUILD_IMAGE}" bash -c "\ echo '' && echo '' && \ yum install -y ${RPM_DEPS} && \ echo '' && echo '' && \ @@ -159,10 +158,10 @@ rpmSign()( if [[ -f "${filePath}" && -f "${gpgFileInHost}" ]]; then log ""; log ""; log "Initiating rpm sign on ${filePath}..." - docker run --rm --name cli-rpm-sign -v "${filePath}":${filePathInImage} \ + docker run --rm --name cli-rpm-sign -v "${filePath}:${filePathInImage}" \ -v "${gpgFileInHost}":"${gpgFileInImage}" \ -v "${JFROG_CLI_HOME}/build-scripts":${RPM_IMAGE_ROOT_DIR}/src \ - ${RPM_SIGN_IMAGE} \ + "${RPM_SIGN_IMAGE}" \ bash -c "yum install -y expect rpm-sign pinentry && \ ${RPM_IMAGE_ROOT_DIR}/src/${rpmSignScript} \"${gpgFileInImage}\" \"${keYID}\" \"${passphrase}\" \"${filePathInImage}\" \ && exit 0 || exit 1" \ @@ -178,7 +177,7 @@ rpmSign()( runTests()( local flavour=$1 - [ ! -z "${flavour}" ] || { echo "Flavour is mandatory to run tests"; exit 1; } + [ -n "${flavour}" ] || { echo "Flavour is mandatory to run tests"; exit 1; } local fileName="${JFROG_CLI_PREFIX}-${VERSION_FORMATTED}.${flavour}" local filePath="${JFROG_CLI_PKG}"/${fileName} @@ -199,7 +198,7 @@ runTests()( if [ -f "${filePath}" ]; then log ""; log ""; log "Testing ${filePath} on ${testImage}..." 
- docker run --rm --name cli-test -v "${filePath}":${filePathInImage} ${testImage} \ + docker run --rm --name cli-test -v "${filePath}:${filePathInImage}" "${testImage}" \ bash -c "${installCommand} && jfrog -version | grep ${JFROG_CLI_VERSION} && \ ${signatureTestCommand} && exit 0 || exit 1" \ || { echo "ERROR: ############### Test failed! ###################"; exit 1; } @@ -214,28 +213,28 @@ runTests()( getArch(){ local image=$1 - [ ! -z "$image" ] || return 0; + [ -n "$image" ] || return 0; - docker run --rm ${image} bash -c "uname -m 2>/dev/null" 2>/dev/null + docker run --rm "${image}" bash -c "uname -m 2>/dev/null" 2>/dev/null } createPackage(){ local flavour=$1 - [ ! -z "${flavour}" ] || errorExit "Flavour is not passed to createPackage method" + [ -n "${flavour}" ] || errorExit "Flavour is not passed to createPackage method" - cp -f "${JFROG_CLI_BINARY}" "${JFROG_CLI_HOME}"/${flavour}/jfrog \ + cp -f "${JFROG_CLI_BINARY}" "${JFROG_CLI_HOME}"/"${flavour}"/jfrog \ || errorExit "Failed to copy ${JFROG_CLI_BINARY} to ${JFROG_CLI_HOME}/${flavour}/jfrog" case "$flavour" in rpm) - [ ! -z "${JFROG_CLI_RPM_ARCH}" ] || JFROG_CLI_RPM_ARCH=$(getArch "${RPM_BUILD_IMAGE}") + [ -n "${JFROG_CLI_RPM_ARCH}" ] || JFROG_CLI_RPM_ARCH=$(getArch "${RPM_BUILD_IMAGE}") VERSION_FORMATTED="${JFROG_CLI_VERSION}.${JFROG_CLI_RPM_ARCH}" createRPMPackage ;; deb) - [ ! -z "${JFROG_CLI_DEB_ARCH}" ] || JFROG_CLI_DEB_ARCH=$(getArch "${DEB_BUILD_IMAGE}") + [ -n "${JFROG_CLI_DEB_ARCH}" ] || JFROG_CLI_DEB_ARCH=$(getArch "${DEB_BUILD_IMAGE}") VERSION_FORMATTED="${JFROG_CLI_VERSION}.${JFROG_CLI_DEB_ARCH}" createDEBPackage ;; @@ -248,7 +247,7 @@ createPackage(){ setBuildImage(){ local arch="$1" - [ ! 
-z "${arch}" ] || errorExit "Architecture is not passed to setBuildImage method" + [ -n "${arch}" ] || errorExit "Architecture is not passed to setBuildImage method" case "$1" in x86_64) @@ -262,7 +261,7 @@ setBuildImage(){ } main(){ - while [[ $# > 0 ]]; do + while [[ $# -gt 0 ]]; do case "$1" in -f | --flavour) flavours="$2" @@ -323,21 +322,21 @@ main(){ esac done - : ${flavours:="rpm deb"} - : ${JFROG_CLI_RUN_TEST:="false"} - : ${RPM_BUILD_IMAGE:="centos:8"} - : ${RPM_SIGN_IMAGE:="centos:7"} - : ${DEB_BUILD_IMAGE:="ubuntu:16.04"} - : ${DEB_TEST_IMAGE:="${DEB_BUILD_IMAGE}"} - : ${RPM_TEST_IMAGE:="${RPM_BUILD_IMAGE}"} - : ${JFROG_CLI_RELEASE_VERSION:="1"} - : ${RPM_SIGN_PASSPHRASE:="$(cat $RPM_SIGN_PASSPHRASE_FILE)"} - : ${RPM_SIGN_KEY_ID:="JFrog Inc."} - : ${RPM_SIGN_KEY_NAME:="RPM-GPG-KEY-jfrog-cli"} - - [ ! -z "${JFROG_CLI_BINARY}" ] || exitWithUsage "jfrog cli binary is not passed" + : "${flavours:="rpm deb"}" + : "${JFROG_CLI_RUN_TEST:="false"}" + : "${RPM_BUILD_IMAGE:="centos:8"}" + : "${RPM_SIGN_IMAGE:="centos:7"}" + : "${DEB_BUILD_IMAGE:="ubuntu:16.04"}" + : "${DEB_TEST_IMAGE:="${DEB_BUILD_IMAGE}"}" + : "${RPM_TEST_IMAGE:="${RPM_BUILD_IMAGE}"}" + : "${JFROG_CLI_RELEASE_VERSION:="1"}" + : "${RPM_SIGN_PASSPHRASE:=$(cat "$RPM_SIGN_PASSPHRASE_FILE")}" + : "${RPM_SIGN_KEY_ID:="JFrog Inc."}" + : "${RPM_SIGN_KEY_NAME:="RPM-GPG-KEY-jfrog-cli"}" + + [ -n "${JFROG_CLI_BINARY}" ] || exitWithUsage "jfrog cli binary is not passed" [ -f "$JFROG_CLI_BINARY" ] || exitWithUsage "jfrog cli binary is not available at $JFROG_CLI_BINARY" - [ ! 
-z "${JFROG_CLI_VERSION}" ] || exitWithUsage "version is not passed, pass the version to be built" + [ -n "${JFROG_CLI_VERSION}" ] || exitWithUsage "version is not passed, pass the version to be built" if [[ "$flavours" == *"rpm"* ]] && [[ -z "${RPM_SIGN_PASSPHRASE}" || "${RPM_SIGN_PASSPHRASE}" == "" ]]; then echo "ERROR: RPM_SIGN_PASSPHRASE environment variable is not set" @@ -351,8 +350,13 @@ main(){ for flavour in $flavours; do createPackage "$flavour" - [[ "${flavour}" == "rpm" ]] && rpmSign || true - [[ "${JFROG_CLI_RUN_TEST}" == "true" ]] && runTests "${flavour}" || true + if [[ "${flavour}" == "rpm" ]]; then + rpmSign + fi + + if [[ "${JFROG_CLI_RUN_TEST}" == "true" ]]; then + runTests "${flavour}" + fi done log "...and Done!" diff --git a/build/deb_rpm/v2/build-scripts/rpm-install.sh b/build/deb_rpm/v2/build-scripts/rpm-install.sh index 98849e879..5ac712e88 100644 --- a/build/deb_rpm/v2/build-scripts/rpm-install.sh +++ b/build/deb_rpm/v2/build-scripts/rpm-install.sh @@ -1,8 +1,9 @@ -echo "[jfrog-cli]" > jfrog-cli.repo; -echo "name=jfrog-cli" >> jfrog-cli.repo; -echo "baseurl=https://releases.jfrog.io/artifactory/jfrog-rpms" >> jfrog-cli.repo; -echo "enabled=1" >> jfrog-cli.repo; -echo "gpgcheck=0" >> jfrog-cli.repo; -sudo mv jfrog-cli.repo /etc/yum.repos.d/; +#!/bin/bash + +echo -e "[jfrog-cli] +name=jfrog-cli +baseurl=https://releases.jfrog.io/artifactory/jfrog-rpms +enabled=1 +gpgcheck=0" | sudo tee /etc/yum.repos.d/jfrog-cli.repo >/dev/null yum install -y jfrog-cli-v2; jf intro; diff --git a/build/deb_rpm/v2/build-scripts/rpm-sign.sh b/build/deb_rpm/v2/build-scripts/rpm-sign.sh index 5a94ce9fe..f76d1d0c7 100755 --- a/build/deb_rpm/v2/build-scripts/rpm-sign.sh +++ b/build/deb_rpm/v2/build-scripts/rpm-sign.sh @@ -23,11 +23,14 @@ rpmInitSigning(){ log "Initializing rpm sign..." 
- gpg --allow-secret-key-import --import ${gpgKeyFile} && \ + gpg --allow-secret-key-import --import "${gpgKeyFile}" && \ gpg --export -a "${keyID}" > /tmp/tmpFile && \ - rpm --import /tmp/tmpFile && \ - rpm -q gpg-pubkey --qf '%{name}-%{version}-%{release} --> %{summary}\n' | grep "${keyID}" || \ - { echo "ERROR: RPM signature initialization failed!" >&2; exit 1; } + if rpm --import /tmp/tmpFile && rpm -q gpg-pubkey --qf '%{name}-%{version}-%{release} --> %{summary}\n' | grep "${keyID}"; then + echo "RPM signature initialization succeeded." + else + echo "ERROR: RPM signature initialization failed!" >&2 + exit 1 + fi rpmEditRpmMacro "${keyID}" || \ { echo "ERROR: Configuring rpm macro failed!" >&2; exit 1; } diff --git a/build/gitlab/v2/.setup-jfrog-unix.yml b/build/gitlab/v2/.setup-jfrog-unix.yml index cbef2bb81..4f8f78cfa 100644 --- a/build/gitlab/v2/.setup-jfrog-unix.yml +++ b/build/gitlab/v2/.setup-jfrog-unix.yml @@ -39,7 +39,7 @@ # Build the URL to the executable matching the OS. - | - if $(echo "${OSTYPE}" | grep -q darwin); then + if echo "${OSTYPE}" | grep -q darwin; then CLI_OS="mac" if [[ $(uname -m) == 'arm64' ]]; then URL="${BASE_URL}/jfrog-cli/v2-jf/${VERSION}/jfrog-cli-mac-arm64/jf" diff --git a/build/installcli/jf.sh b/build/installcli/jf.sh index b3a8c5009..6e4ca516b 100755 --- a/build/installcli/jf.sh +++ b/build/installcli/jf.sh @@ -3,8 +3,6 @@ CLI_OS="na" CLI_MAJOR_VER="v2-jf" VERSION="[RELEASE]" -# Order is by destination priority. -DESTINATION_PATHS="/usr/local/bin /usr/bin /opt/bin" if [ $# -eq 1 ] then @@ -65,13 +63,12 @@ curl -XGET "$URL" -L -k -g > $FILE_NAME chmod u+x $FILE_NAME # Move executable to a destination in path. -set -- $DESTINATION_PATHS +# Order is by destination priority. +set -- "/usr/local/bin" "/usr/bin" "/opt/bin" while [ -n "$1" ]; do # Check if destination is in path. - if echo $PATH|grep "$1" -> /dev/null ; then - mv $FILE_NAME $1 - if [ "$?" 
-eq "0" ] - then + if echo "$PATH"|grep "$1" -> /dev/null ; then + if mv $FILE_NAME "$1" ; then echo "" echo "The $FILE_NAME executable was installed in $1" jf intro @@ -79,8 +76,7 @@ while [ -n "$1" ]; do else echo "" echo "We'd like to install the JFrog CLI executable in $1. Please approve this installation by entering your password." - sudo mv $FILE_NAME $1 - if [ "$?" -eq "0" ] + if sudo mv $FILE_NAME "$1" -eq "0" then echo "" echo "The $FILE_NAME executable was installed in $1" diff --git a/build/installcli/jfrog.sh b/build/installcli/jfrog.sh index fe1d0695b..77c86b7af 100755 --- a/build/installcli/jfrog.sh +++ b/build/installcli/jfrog.sh @@ -3,8 +3,6 @@ CLI_OS="na" CLI_MAJOR_VER="v2" VERSION="[RELEASE]" -# Order is by destination priority. -DESTINATION_PATHS="/usr/local/bin /usr/bin /opt/bin" if [ $# -eq 1 ] then @@ -65,21 +63,19 @@ curl -XGET "$URL" -L -k -g > $FILE_NAME chmod u+x $FILE_NAME # Move executable to a destination in path. -set -- $DESTINATION_PATHS +# Order is by destination priority. +set -- "/usr/local/bin" "/usr/bin" "/opt/bin" while [ -n "$1" ]; do # Check if destination is in path. - if echo $PATH|grep "$1" -> /dev/null ; then - mv $FILE_NAME $1 - if [ "$?" -eq "0" ] - then + if echo "$PATH"|grep "$1" -> /dev/null ; then + if mv $FILE_NAME "$1" ; then echo "" echo "The $FILE_NAME executable was installed in $1" exit 0 else echo "" echo "We'd like to install the JFrog CLI executable in $1. Please approve this installation by entering your password." - sudo mv $FILE_NAME $1 - if [ "$?" 
-eq "0" ] + if sudo mv $FILE_NAME "$1" -eq "0" then echo "" echo "The $FILE_NAME executable was installed in $1" diff --git a/build/npm/v2-jf/bin/jf b/build/npm/v2-jf/bin/_ old mode 100755 new mode 100644 similarity index 100% rename from build/npm/v2-jf/bin/jf rename to build/npm/v2-jf/bin/_ diff --git a/build/npm/v2-jf/init.js b/build/npm/v2-jf/init.js index e3d82ebd6..36b3239ef 100644 --- a/build/npm/v2-jf/init.js +++ b/build/npm/v2-jf/init.js @@ -1,65 +1,90 @@ validateNpmVersion(); -var https = require('https'); -var http = require('http'); -var url = require('url'); -var fs = require('fs'); -var packageJson = require('./package.json'); -var fileName = getFileName(); -var filePath = "bin/" + fileName; -var version = packageJson.version; -var pkgName = "jfrog-cli-" + getArchitecture(); +const {get} = require("https"); +const {request} = require("http"); +const {URL} = require("url"); +const {createWriteStream, chmodSync} = require("fs"); +const packageJson = require("./package.json"); +const fileName = getFileName(); +const filePath = "bin/" + fileName; +const version = packageJson.version; +const pkgName = "jfrog-cli-" + getArchitecture(); downloadCli(); function validateNpmVersion() { if (!isValidNpmVersion()) { - throw new Error("JFrog CLI can be installed using npm version 5.0.0 or above."); + throw new Error( + "JFrog CLI can be installed using npm version 5.0.0 or above." 
+ ); } } function downloadWithProxy(myUrl) { - var proxyparts = url.parse(process.env.https_proxy); - var myUrlParts = url.parse(myUrl); + const proxyParts = new URL(process.env.https_proxy); + const myUrlParts = new URL(myUrl); - http.request({ + request({ host: proxyparts.hostname, port: proxyparts.port, - method: 'CONNECT', - path: myUrlParts.hostname + ':443' - }).on('connect', function(res, socket, head) { - https.get({ - host: myUrlParts.hostname, - socket: socket, - path: myUrlParts.path, - agent: false - }, function(res) { - if (res.statusCode == 301 || res.statusCode == 302) { - downloadWithProxy(res.headers.location); - } else if (res.statusCode == 200) { - writeToFile(res); - } else { - console.log('Unexpected status code ' + res.statusCode + ' during JFrog CLI download'); - } - }).on('error', function (err) {console.error(err);}); - }).end(); + method: "CONNECT", + path: myUrlParts.hostname + ":443", + }) + .on("connect", function (res, socket, head) { + get( + { + host: myUrlParts.hostname, + socket: socket, + path: myUrlParts.path, + agent: false, + }, + function (res) { + if (res.statusCode === 301 || res.statusCode === 302) { + downloadWithProxy(res.headers.location); + } else if (res.statusCode === 200) { + writeToFile(res); + } else { + console.log( + "Unexpected status code " + + res.statusCode + + " during JFrog CLI download" + ); + } + } + ).on("error", function (err) { + console.error(err); + }); + }) + .end(); } function download(url) { - https.get(url, function(res) { - if (res.statusCode == 301 || res.statusCode == 302) { + get(url, function (res) { + if (res.statusCode === 301 || res.statusCode === 302) { download(res.headers.location); - } else if (res.statusCode == 200) { + } else if (res.statusCode === 200) { writeToFile(res); } else { - console.log('Unexpected status code ' + res.statusCode + ' during JFrog CLI download'); + console.log( + "Unexpected status code " + + res.statusCode + + " during JFrog CLI download" + ); } - 
}).on('error', function (err) {console.error(err);}); + }).on("error", function (err) { + console.error(err); + }); } function downloadCli() { - console.log("Downloading JFrog CLI " + version ); - var startUrl = 'https://releases.jfrog.io/artifactory/jfrog-cli/v2-jf/' + version + '/' + pkgName + '/' + fileName; + console.log("Downloading JFrog CLI " + version); + const startUrl = + "https://releases.jfrog.io/artifactory/jfrog-cli/v2-jf/" + + version + + "/" + + pkgName + + "/" + + fileName; // We detect outbound proxy by looking at the environment variable if (process.env.https_proxy && process.env.https_proxy.length > 0) { downloadWithProxy(startUrl); @@ -69,58 +94,61 @@ function downloadCli() { } function isValidNpmVersion() { - var child_process = require('child_process'); - var npmVersionCmdOut = child_process.execSync("npm version -json"); - var npmVersion = JSON.parse(npmVersionCmdOut).npm; + const child_process = require("child_process"); + const npmVersionCmdOut = child_process.execSync("npm version -json"); + const npmVersion = JSON.parse(npmVersionCmdOut).npm; // Supported since version 5.0.0 return parseInt(npmVersion.charAt(0)) > 4; } function writeToFile(response) { - var file = fs.createWriteStream(filePath); - response.on('data', function (chunk) { - file.write(chunk); - }).on('end', function () { - file.end(); - if (!process.platform.startsWith("win")) { - fs.chmodSync(filePath, 0555); - } - }).on('error', function (err) { - console.error(err); + const file = createWriteStream(filePath); + response + .on("data", function (chunk) { + file.write(chunk); + }) + .on("end", function () { + file.end(); + if (!process.platform.startsWith("win")) { + chmodSync(filePath, 755); + } + }) + .on("error", function (err) { + console.error(err); }); } function getArchitecture() { const platform = process.platform; - if (platform.startsWith('win')) { + if (platform.startsWith("win")) { // Windows architecture: - return 'windows-amd64'; + return "windows-amd64"; } 
 const arch = process.arch; - if (platform.includes('darwin')) { + if (platform.includes("darwin")) { // macOS architecture: - return arch === 'arm64' ? 'mac-arm64' : 'mac-386'; + return arch === "arm64" ? "mac-arm64" : "mac-386"; } // linux architecture: switch (arch) { - case 'x64': - return 'linux-amd64'; - case 'arm64': - return 'linux-arm64'; - case 'arm': - return 'linux-arm'; - case 's390x': - return 'linux-s390x'; - case 'ppc64': - return 'linux-ppc64'; + case "x64": + return "linux-amd64"; + case "arm64": + return "linux-arm64"; + case "arm": + return "linux-arm"; + case "s390x": + return "linux-s390x"; + case "ppc64": + return "linux-ppc64"; default: - return 'linux-386'; + return "linux-386"; } } function getFileName() { - var executable = "jf"; + let executable = "jf"; if (process.platform.startsWith("win")) { executable += ".exe"; } diff --git a/build/npm/v2-jf/package-lock.json b/build/npm/v2-jf/package-lock.json index b73e73247..6f3b862d8 100644 --- a/build/npm/v2-jf/package-lock.json +++ b/build/npm/v2-jf/package-lock.json @@ -1,5 +1,6 @@ { "name": "jfrog-cli-v2-jf", - "version": "2.47.0", - "lockfileVersion": 1 + "version": "2.48.0", + "lockfileVersion": 2, + "requires": true } diff --git a/build/npm/v2-jf/package.json b/build/npm/v2-jf/package.json index c9f4846ef..8d98ad931 100644 --- a/build/npm/v2-jf/package.json +++ b/build/npm/v2-jf/package.json @@ -1,6 +1,6 @@ { "name": "jfrog-cli-v2-jf", - "version": "2.47.0", + "version": "2.48.0", "description": "🐸 Command-line interface for JFrog Artifactory, Xray, Distribution, Pipelines and Mission Control 🐸", "homepage": "https://github.com/jfrog/jfrog-cli", "preferGlobal": true, diff --git a/build/npm/v2/bin/jfrog b/build/npm/v2/bin/_ old mode 100755 new mode 100644 similarity index 100% rename from build/npm/v2/bin/jfrog rename to build/npm/v2/bin/_ diff --git a/build/npm/v2/init.js b/build/npm/v2/init.js index 87ba9989b..d67da8a2c 100644 --- a/build/npm/v2/init.js +++ b/build/npm/v2/init.js @@ 
-1,66 +1,91 @@ validateNpmVersion(); -var https = require('https'); -var http = require('http'); -var url = require('url'); -var fs = require('fs'); -var packageJson = require('./package.json'); -var fileName = getFileName(); -var filePath = "bin/" + fileName; -var version = packageJson.version; -var pkgName = "jfrog-cli-" + getArchitecture(); +const {get} = require("https"); +const {request} = require("http"); +const {URL} = require("url"); +const {createWriteStream, chmodSync} = require("fs"); +const packageJson = require("./package.json"); +const fileName = getFileName(); +const filePath = "bin/" + fileName; +const version = packageJson.version; +const pkgName = "jfrog-cli-" + getArchitecture(); downloadCli(); function validateNpmVersion() { if (!isValidNpmVersion()) { - throw new Error("JFrog CLI can be installed using npm version 5.0.0 or above."); + throw new Error( + "JFrog CLI can be installed using npm version 5.0.0 or above." + ); } } function downloadWithProxy(myUrl) { - var proxyparts = url.parse(process.env.https_proxy); - var myUrlParts = url.parse(myUrl); + const proxyparts = new URL(process.env.https_proxy); + const myUrlParts = new URL(myUrl); - http.request({ + request({ host: proxyparts.hostname, port: proxyparts.port, - method: 'CONNECT', - path: myUrlParts.hostname + ':443' - }).on('connect', function(res, socket, head) { - https.get({ - host: myUrlParts.hostname, - socket: socket, - path: myUrlParts.path, - agent: false - }, function(res) { - if (res.statusCode == 301 || res.statusCode == 302) { - downloadWithProxy(res.headers.location); - } else if (res.statusCode == 200) { - writeToFile(res); - } else { - console.log('Unexpected status code ' + res.statusCode + ' during JFrog CLI download'); - } - }).on('error', function (err) {console.error(err);}); - }).end(); + method: "CONNECT", + path: myUrlParts.hostname + ":443", + }) + .on("connect", function (res, socket, head) { + get( + { + host: myUrlParts.hostname, + socket: socket, + path: 
myUrlParts.path, + agent: false, + }, + function (res) { + if (res.statusCode === 301 || res.statusCode === 302) { + downloadWithProxy(res.headers.location); + } else if (res.statusCode === 200) { + writeToFile(res); + } else { + console.log( + "Unexpected status code " + + res.statusCode + + " during JFrog CLI download" + ); + } + } + ).on("error", function (err) { + console.error(err); + }); + }) + .end(); } function download(url) { - https.get(url, function(res) { - if (res.statusCode == 301 || res.statusCode == 302) { + get(url, function (res) { + if (res.statusCode === 301 || res.statusCode === 302) { download(res.headers.location); - } else if (res.statusCode == 200) { + } else if (res.statusCode === 200) { writeToFile(res); } else { - console.log('Unexpected status code ' + res.statusCode + ' during JFrog CLI download'); + console.log( + "Unexpected status code " + + res.statusCode + + " during JFrog CLI download" + ); } - }).on('error', function (err) {console.error(err);}); + }).on("error", function (err) { + console.error(err); + }); } function downloadCli() { - console.log("Downloading JFrog CLI " + version ); - var startUrl = 'https://releases.jfrog.io/artifactory/jfrog-cli/v2/' + version + '/' + pkgName + '/' + fileName; - // We detect outbount proxy by looking at the environment variable + console.log("Downloading JFrog CLI " + version); + const startUrl = + "https://releases.jfrog.io/artifactory/jfrog-cli/v2/" + + version + + "/" + + pkgName + + "/" + + fileName; + // We detect outbound proxy by looking at the environment variable if (process.env.https_proxy && process.env.https_proxy.length > 0) { downloadWithProxy(startUrl); } else { @@ -69,58 +94,61 @@ function downloadCli() { } function isValidNpmVersion() { - var child_process = require('child_process'); - var npmVersionCmdOut = child_process.execSync("npm version -json"); - var npmVersion = JSON.parse(npmVersionCmdOut).npm; + const child_process = require("child_process"); + const 
 npmVersionCmdOut = child_process.execSync("npm version -json"); + const npmVersion = JSON.parse(npmVersionCmdOut).npm; // Supported since version 5.0.0 return parseInt(npmVersion.charAt(0)) > 4; } function writeToFile(response) { - var file = fs.createWriteStream(filePath); - response.on('data', function (chunk) { - file.write(chunk); - }).on('end', function () { - file.end(); - if (!process.platform.startsWith("win")) { - fs.chmodSync(filePath, 0555); - } - }).on('error', function (err) { - console.error(err); + const file = createWriteStream(filePath); + response + .on("data", function (chunk) { + file.write(chunk); + }) + .on("end", function () { + file.end(); + if (!process.platform.startsWith("win")) { + chmodSync(filePath, 0o755); + } + }) + .on("error", function (err) { + console.error(err); }); } function getArchitecture() { const platform = process.platform; - if (platform.startsWith('win')) { + if (platform.startsWith("win")) { // Windows architecture: - return 'windows-amd64'; + return "windows-amd64"; } const arch = process.arch; - if (platform.includes('darwin')) { + if (platform.includes("darwin")) { // macOS architecture: - return arch === 'arm64' ? 
 "mac-arm64" : "mac-386"; } // linux architecture: switch (arch) { - case 'x64': - return 'linux-amd64'; - case 'arm64': - return 'linux-arm64'; - case 'arm': - return 'linux-arm'; - case 's390x': - return 'linux-s390x'; - case 'ppc64': - return 'linux-ppc64'; + case "x64": + return "linux-amd64"; + case "arm64": + return "linux-arm64"; + case "arm": + return "linux-arm"; + case "s390x": + return "linux-s390x"; + case "ppc64": + return "linux-ppc64"; default: - return 'linux-386'; + return "linux-386"; } } function getFileName() { - var executable = "jfrog"; + let executable = "jfrog"; if (process.platform.startsWith("win")) { executable += ".exe"; } diff --git a/build/npm/v2/package-lock.json b/build/npm/v2/package-lock.json index 7731f7de6..aec377332 100644 --- a/build/npm/v2/package-lock.json +++ b/build/npm/v2/package-lock.json @@ -1,5 +1,6 @@ { "name": "jfrog-cli-v2", - "version": "2.47.0", - "lockfileVersion": 1 + "version": "2.48.0", + "lockfileVersion": 2, + "requires": true } diff --git a/build/npm/v2/package.json b/build/npm/v2/package.json index 1824a549b..2cac51af2 100644 --- a/build/npm/v2/package.json +++ b/build/npm/v2/package.json @@ -1,6 +1,6 @@ { "name": "jfrog-cli-v2", - "version": "2.47.0", + "version": "2.48.0", "description": "🐸 Command-line interface for JFrog Artifactory, Xray, Distribution, Pipelines and Mission Control 🐸", "homepage": "https://github.com/jfrog/jfrog-cli", "preferGlobal": true, diff --git a/build/setupcli/jf.sh b/build/setupcli/jf.sh index 37a70b6fe..f6fa02fae 100755 --- a/build/setupcli/jf.sh +++ b/build/setupcli/jf.sh @@ -3,8 +3,6 @@ CLI_OS="na" CLI_MAJOR_VER="v2-jf" VERSION="[RELEASE]" -# Order is by destination priority. -DESTINATION_PATHS="/usr/local/bin /usr/bin /opt/bin" SETUP_COMMAND="jf setup" GREEN_COLOR='\033[0;32m' REMOVE_COLOR='\033[0m' @@ -69,28 +67,26 @@ curl -XGET "$URL" -L -k -g > $FILE_NAME chmod u+x $FILE_NAME # Move executable to a destination in path. 
-set -- $DESTINATION_PATHS +# Order is by destination priority. +set -- "/usr/local/bin" "/usr/bin" "/opt/bin" while [ -n "$1" ]; do # Check if destination is in path. - if echo $PATH|grep "$1" -> /dev/null ; then - mv $FILE_NAME $1 - if [ "$?" -eq "0" ] - then + if echo "$PATH"|grep "$1" -> /dev/null ; then + if mv $FILE_NAME "$1" ; then echo "" echo "The $FILE_NAME executable was installed in $1" print_installation_greeting - $SETUP_COMMAND $BASE64_CRED + $SETUP_COMMAND "$BASE64_CRED" exit 0 else echo "" echo "We'd like to install the JFrog CLI executable in $1. Please approve this installation by entering your password." - sudo mv $FILE_NAME $1 - if [ "$?" -eq "0" ] + if sudo mv $FILE_NAME "$1" then echo "" echo "The $FILE_NAME executable was installed in $1" print_installation_greeting - $SETUP_COMMAND $BASE64_CRED + $SETUP_COMMAND "$BASE64_CRED" exit 0 fi fi diff --git a/documentation/CLI-for-JFrog-Lifecycle.md b/documentation/CLI-for-JFrog-Lifecycle.md index ce64fff87..eaccb5bd6 100644 --- a/documentation/CLI-for-JFrog-Lifecycle.md +++ b/documentation/CLI-for-JFrog-Lifecycle.md @@ -17,7 +17,7 @@ The following sections describe the commands available in JFrog CLI when perform ### Creating a Release Bundle v2 from builds or from existing Release Bundles -This command creates a Release Bundle v2 from a published build-info or from an existing Release Bundle. +This command creates a Release Bundle v2 from published build-infos or from existing Release Bundles. 1. 
To create a Release Bundle from published build-infos, provide the `--builds` option, which accepts a path to a file using the following JSON format: ```json { @@ -48,20 +48,20 @@ This command creates a Release Bundle v2 from a published build-info or from an ``` `project` is optional (if left empty, the default project will be used) -| | | -|------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| Command-name | release-bundle-create | -| Abbreviation | rbc | -| Command options | | -| --builds | \[Optional\]

Path to a JSON file containing information about the source builds from which to create a Release Bundle. | -| --project | \[Optional\]

JFrog Project key associated with the Release Bundle version. | -| --release-bundles | \[Optional\]

Path to a JSON file containing information about the source Release Bundles from which to create a Release Bundle. | -| --server-id | \[Optional\]

Platform server ID configured using the `jf c add` command. | -| --signing-key | \[Mandatory\]

The GPG/RSA key-pair name given in Artifactory. | -| --sync | \[Default: false\]

Set to true to run synchronously. | -| Command arguments | | -| release bundle name | Name of the newly created Release Bundle. | -| release bundle version | Version of the newly created Release Bundle. | +| | | +|------------------------|----------------------------------------------------------------------------------------------------------------------------------------| +| Command-name | release-bundle-create | +| Abbreviation | rbc | +| Command options | | +| --builds | \[Optional\]

Path to a JSON file containing information about the source builds from which to create a Release Bundle. | +| --project | \[Optional\]

JFrog Project key associated with the Release Bundle version. | +| --release-bundles | \[Optional\]

Path to a JSON file containing information about the source Release Bundles from which to create a Release Bundle. | +| --server-id | \[Optional\]

Platform server ID configured using the `jf c add` command. | +| --signing-key | \[Mandatory\]

The GPG/RSA key-pair name given in Artifactory. | +| --sync | \[Default: false\]

Set to true to run synchronously. | +| Command arguments | | +| release bundle name | Name of the newly created Release Bundle. | +| release bundle version | Version of the newly created Release Bundle. | #### Examples @@ -77,33 +77,33 @@ jf rbc --builds=/path/to/builds-spec.json --signing-key=myKeyPair myApp 1.0.0 Create a Release Bundle v2 with the name "myApp" and version "1.0.0", with signing key pair "myKeyPair". The Release Bundle will include the artifacts of the Release Bundles that were provided in the Release Bundles spec. ``` -jf rbc --spec=/path/to/release-bundles-spec.json --signing-key=myKeyPair myApp 1.0.0 +jf rbc --release-bundles=/path/to/release-bundles-spec.json --signing-key=myKeyPair myApp 1.0.0 ``` ##### Example 3 Create a Release Bundle v2 synchronously with the name "myApp" and version "1.0.0", in project "project0", with signing key pair "myKeyPair". The Release Bundle will include the artifacts of the Release Bundles that were provided in the Release Bundles spec. ``` -jf rbc --spec=/path/to/release-bundles-spec.json --signing-key=myKeyPair --sync=true --project=project0 myApp 1.0.0 +jf rbc --release-bundles=/path/to/release-bundles-spec.json --signing-key=myKeyPair --sync=true --project=project0 myApp 1.0.0 ``` ### Promoting a Release Bundle v2 This command promotes a Release Bundle v2 to a target environment. -| | | -|------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| Command-name | release-bundle-promote | -| Abbreviation | rbp | -| Command options | | -| --overwrite | \[Default: false\]

Set to true to replace artifacts with the same name but a different checksum, if such already exist at the promotion targets. By default, the promotion is stopped when a conflict occurs.| -| --project | \[Optional\]

Project key associated with the Release Bundle version. | -| --server-id | \[Optional\]

Platform server ID configured using the config command. | -| --signing-key | \[Mandatory\]

The GPG/RSA key-pair name given in Artifactory. | -| --sync | \[Default: false\]

Set to true to run synchronously. | -| Command arguments | | -| release bundle name | Name of the Release Bundle to promote. | -| release bundle version | Version of the Release Bundle to promote. | -| environment | Name of the target environment for the promotion. | +| | | +|------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Command-name | release-bundle-promote | +| Abbreviation | rbp | +| Command options | | +| --overwrite | \[Default: false\]

Set to true to replace artifacts with the same name but a different checksum, if such already exist at the promotion targets. By default, the promotion is stopped when a conflict occurs. | +| --project | \[Optional\]

Project key associated with the Release Bundle version. | +| --server-id | \[Optional\]

Platform server ID configured using the config command. | +| --signing-key | \[Mandatory\]

The GPG/RSA key-pair name given in Artifactory. | +| --sync | \[Default: false\]

Set to true to run synchronously. | +| Command arguments | | +| release bundle name | Name of the Release Bundle to promote. | +| release bundle version | Version of the Release Bundle to promote. | +| environment | Name of the target environment for the promotion. | #### Examples ##### Example 1 diff --git a/documentation/CLI-for-JFrog-Xray.md b/documentation/CLI-for-JFrog-Xray.md index 4bd355393..57b96f506 100644 --- a/documentation/CLI-for-JFrog-Xray.md +++ b/documentation/CLI-for-JFrog-Xray.md @@ -31,25 +31,22 @@ To authenticate yourself using an Xray Access Token, either configure your Acces ## Scanning Project Dependencies ### General -The _**jf audit**_ command allows scanning your source code dependencies to find security vulnerabilities and licenses violations, with the ability to scan against your Xray policies. The command builds a deep dependencies graph for your project, scans it with Xray, and displays the results. It uses the package manager used by the project to build the dependencies graph. Currently, the following package managers are supported. - -* Maven (mvn) - Version 3.1.0 or above of Maven is supported. -* Gradle (gradle) -* Npm (npm) -* Yarn (yarn) -* Pip (pip) -* Pipenv (pipenv) -* Poetry (poetry) -* Go Modules (go) -* NuGet (nuget) -* .NET Core CLI (dotnet) +The _**jf audit**_ command allows scanning your source code dependencies to find security vulnerabilities and license violations, with the ability to scan against your Xray policies. The command builds a deep dependencies graph for your project, scans it with Xray, and displays the results. +It uses the package manager used by the project to build the dependencies graph. + +Currently, the following package managers are supported: + +|Go Go|Gradle Gradle|Maven Maven (3.1+)|npm npm|Yarn Yarn| +|:----|:----|:----|:----|:----| +|.NET .NET|NuGet NuGet|Pip Pip|Pipenv Pipenv|Poetry Poetry| The command will detect the package manager used by the project automatically. 
It requires version 3.29.0 or above of Xray and also version 2.13.0 or above of JFrog CLI. ### Advanced Scans This command also supports the following Advanced Scans with the **Advanced Security Package** enabled on the JFrog Platform instance. To enable the Advanced Security Package, contact us using [this](https://jfrog.com/advanced-security-contact-us/) form. -* **Vulnerability Contextual Analysis**: This feature uses the code context to eliminate false positive reports on vulnerable dependencies that are not applicable to the code. Vulnerability Contextual Analysis is currently supported for Python and JavaScript code. +* **Static Application Security Testing (SAST)**: Provides fast and accurate security-focused engines that detect zero-day security vulnerabilities on your source code sensitive operations, while minimizing false positives. +* **Vulnerability Contextual Analysis**: This feature uses the code context to eliminate false positive reports on vulnerable dependencies that are not applicable to the code. For CVE vulnerabilities that are applicable to your code, Frogbot will create pull request comments on the relevant code lines with full descriptions regarding the security issues caused by the CVE. Vulnerability Contextual Analysis is currently supported for Python, JavaScript, and Java code. * **Secrets Detection**: Detect any secrets left exposed inside the code. to stop any accidental leak of internal tokens or credentials. * **Infrastructure as Code scans (IaC)**: Scan Infrastructure as Code (Terraform) files for early detection of cloud and infrastructure misconfigurations. 
diff --git a/go.mod b/go.mod index 79394e410..a829c0983 100644 --- a/go.mod +++ b/go.mod @@ -9,8 +9,8 @@ require ( github.com/gocarina/gocsv v0.0.0-20230616125104-99d496ca653d github.com/jfrog/build-info-go v1.9.10 github.com/jfrog/gofrog v1.3.0 - github.com/jfrog/jfrog-cli-core/v2 v2.43.0 - github.com/jfrog/jfrog-client-go v1.32.2 + github.com/jfrog/jfrog-cli-core/v2 v2.43.2 + github.com/jfrog/jfrog-client-go v1.32.3 github.com/jszwec/csvutil v1.8.0 github.com/mholt/archiver/v3 v3.5.1 github.com/stretchr/testify v1.8.4 @@ -30,7 +30,8 @@ require ( github.com/BurntSushi/toml v1.3.2 // indirect github.com/CycloneDX/cyclonedx-go v0.7.2 // indirect github.com/Microsoft/go-winio v0.6.1 // indirect - github.com/ProtonMail/go-crypto v0.0.0-20230828082145-3c4c8a2d2371 // indirect + github.com/Microsoft/hcsshim v0.11.0 // indirect + github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c // indirect github.com/VividCortex/ewma v1.2.0 // indirect github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect github.com/acomagu/bufpipe v1.0.4 // indirect @@ -39,13 +40,13 @@ require ( github.com/cenkalti/backoff/v4 v4.2.1 // indirect github.com/chzyer/readline v1.5.1 // indirect github.com/cloudflare/circl v1.3.3 // indirect - github.com/containerd/containerd v1.7.3 // indirect + github.com/containerd/containerd v1.7.6 // indirect github.com/cpuguy83/dockercfg v0.3.1 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect github.com/cyphar/filepath-securejoin v0.2.4 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/docker/distribution v2.8.2+incompatible // indirect - github.com/docker/docker v24.0.5+incompatible // indirect + github.com/docker/docker v24.0.6+incompatible // indirect github.com/docker/go-connections v0.4.0 // indirect github.com/docker/go-units v0.5.0 // indirect github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect @@ -64,6 +65,7 @@ require ( github.com/hashicorp/hcl v1.0.0 // indirect 
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect github.com/jedib0t/go-pretty/v6 v6.4.7 // indirect + github.com/jfrog/jfrog-apps-config v1.0.1 // indirect github.com/kevinburke/ssh_config v1.2.0 // indirect github.com/klauspost/compress v1.16.0 // indirect github.com/klauspost/cpuid/v2 v2.2.3 // indirect @@ -84,7 +86,7 @@ require ( github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.1.0-rc4 // indirect github.com/opencontainers/runc v1.1.5 // indirect - github.com/owenrumney/go-sarif/v2 v2.2.0 // indirect + github.com/owenrumney/go-sarif/v2 v2.2.2 // indirect github.com/pelletier/go-toml/v2 v2.0.8 // indirect github.com/pierrec/lz4/v4 v4.1.2 // indirect github.com/pjbgf/sha1cd v0.3.0 // indirect @@ -105,7 +107,7 @@ require ( github.com/subosito/gotenv v1.4.2 // indirect github.com/ulikunitz/xz v0.5.9 // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect - github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect + github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778 // indirect @@ -124,12 +126,12 @@ require ( gopkg.in/yaml.v3 v3.0.1 // indirect ) -// replace github.com/jfrog/build-info-go => github.com/jfrog/build-info-go v1.8.9-0.20230828134416-f0db33dd9344 +replace github.com/jfrog/build-info-go => github.com/jfrog/build-info-go v1.8.9-0.20230928084830-478bd49f5d3e -replace github.com/jfrog/jfrog-cli-core/v2 => github.com/RobiNino/jfrog-cli-core/v2 v2.0.0-20230914121902-5d87822fae6d +replace github.com/jfrog/jfrog-cli-core/v2 => github.com/RobiNino/jfrog-cli-core/v2 v2.0.0-20231002131847-e7ac8274f4e3 // replace github.com/jfrog/gofrog => github.com/jfrog/gofrog v1.2.6-0.20230418122323-2bf299dd6d27 -replace github.com/jfrog/jfrog-client-go 
=> github.com/RobiNino/jfrog-client-go v0.0.0-20230914111739-e5625524b232 +replace github.com/jfrog/jfrog-client-go => github.com/RobiNino/jfrog-client-go v0.0.0-20231002130445-b16f43c60c85 // replace github.com/jfrog/build-info-go => github.com/jfrog/build-info-go v1.8.9-0.20230831151231-e5e7bd035ddc diff --git a/go.sum b/go.sum index 9ec1d873c..63d5b84d9 100644 --- a/go.sum +++ b/go.sum @@ -38,7 +38,7 @@ cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3f dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/AdaLogics/go-fuzz-headers v0.0.0-20230106234847-43070de90fa1 h1:EKPd1INOIyr5hWOWhvpmQpY6tKjeG0hT1s3AMC/9fic= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= @@ -50,13 +50,14 @@ github.com/CycloneDX/cyclonedx-go v0.7.2/go.mod h1:K2bA+324+Og0X84fA8HhN2X066K7B github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= -github.com/Microsoft/hcsshim v0.10.0-rc.8 h1:YSZVvlIIDD1UxQpJp0h+dnpLUw+TrY0cx8obKsp3bek= -github.com/ProtonMail/go-crypto v0.0.0-20230828082145-3c4c8a2d2371 h1:kkhsdkhsCvIsutKu5zLMgWtgh9YxGCNAw8Ad8hjwfYg= -github.com/ProtonMail/go-crypto v0.0.0-20230828082145-3c4c8a2d2371/go.mod 
h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= -github.com/RobiNino/jfrog-cli-core/v2 v2.0.0-20230914121902-5d87822fae6d h1:cefYQmqRldmCK6wKlnnGWjvPmbIZdBik908u5PsdlXI= -github.com/RobiNino/jfrog-cli-core/v2 v2.0.0-20230914121902-5d87822fae6d/go.mod h1:8+7FsK3yGO5TpxlEyJ6d3tHRwk8XvFsHde6Ih5Aty6c= -github.com/RobiNino/jfrog-client-go v0.0.0-20230914111739-e5625524b232 h1:WnSlTYiMi3TLbZgHK82wcbl9QMNGO5PHMDmP1n5jINk= -github.com/RobiNino/jfrog-client-go v0.0.0-20230914111739-e5625524b232/go.mod h1:UewnwkIf/77HzBgwCPzOHZCK6V/Nw5/JwdzN/tRb4aU= +github.com/Microsoft/hcsshim v0.11.0 h1:7EFNIY4igHEXUdj1zXgAyU3fLc7QfOKHbkldRVTBdiM= +github.com/Microsoft/hcsshim v0.11.0/go.mod h1:OEthFdQv/AD2RAdzR6Mm1N1KPCztGKDurW1Z8b8VGMM= +github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c h1:kMFnB0vCcX7IL/m9Y5LO+KQYv+t1CQOiFe6+SV2J7bE= +github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= +github.com/RobiNino/jfrog-cli-core/v2 v2.0.0-20231002131847-e7ac8274f4e3 h1:p2Q/CBUdWPEmFQ7lPYElNvezgXKBowWFzeTvjjZzluE= +github.com/RobiNino/jfrog-cli-core/v2 v2.0.0-20231002131847-e7ac8274f4e3/go.mod h1:Ij3mfD91tmAMnQ19cFuv2eCs3UAsCfZ54ICxdQtxUg4= +github.com/RobiNino/jfrog-client-go v0.0.0-20231002130445-b16f43c60c85 h1:lfFzAw4zLbV/zlvQVy0441vvkJbqIBFOgPJtptn1G64= +github.com/RobiNino/jfrog-client-go v0.0.0-20231002130445-b16f43c60c85/go.mod h1:AePTNv5H1YSGycxiL+1jXHCzqu3rCGruVP7S0N+BEEo= github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1ow= github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8= @@ -99,8 +100,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go 
v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/containerd/console v1.0.3/go.mod h1:7LqA/THxQ86k76b8c/EMSiaJ3h1eZkMkXar0TQ1gf3U= -github.com/containerd/containerd v1.7.3 h1:cKwYKkP1eTj54bP3wCdXXBymmKRQMrWjkLSWZZJDa8o= -github.com/containerd/containerd v1.7.3/go.mod h1:32FOM4/O0RkNg7AjQj3hDzN9cUGtu+HMvaKUNiqCZB8= +github.com/containerd/containerd v1.7.6 h1:oNAVsnhPoy4BTPQivLgTzI9Oleml9l/+eYIDYXRCYo8= +github.com/containerd/containerd v1.7.6/go.mod h1:SY6lrkkuJT40BVNO37tlYTSnKJnP5AXBc0fhx0q+TJ4= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/cpuguy83/dockercfg v0.3.1 h1:/FpZ+JaygUR/lZP2NlFI2DVfrOEMAIKP5wWEJdoYe9E= github.com/cpuguy83/dockercfg v0.3.1/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= @@ -118,8 +119,8 @@ github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48 h1:fRzb/w+pyskVMQ+ github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8= github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v24.0.5+incompatible h1:WmgcE4fxyI6EEXxBRxsHnZXrO1pQ3smi0k/jho4HLeY= -github.com/docker/docker v24.0.5+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v24.0.6+incompatible h1:hceabKCtUgDqPu+qm0NgsaXf28Ljf4/pWFL7xjWWDgE= +github.com/docker/docker v24.0.6+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= @@ -239,10 +240,12 @@ github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 
h1:BQSFePA1RWJOl github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/jedib0t/go-pretty/v6 v6.4.7 h1:lwiTJr1DEkAgzljsUsORmWsVn5MQjt1BPJdPCtJ6KXE= github.com/jedib0t/go-pretty/v6 v6.4.7/go.mod h1:Ndk3ase2CkQbXLLNf5QDHoYb6J9WtVfmHZu9n8rk2xs= -github.com/jfrog/build-info-go v1.9.10 h1:uXnDLVxpqxoAMpXcki00QaBB+M2BoGMMpHODPkmmYOY= -github.com/jfrog/build-info-go v1.9.10/go.mod h1:ujJ8XQZMdT2tMkLSMJNyDd1pCY+duwHdjV+9or9FLIg= +github.com/jfrog/build-info-go v1.8.9-0.20230928084830-478bd49f5d3e h1:tWNlQScbapCz5/EBc+lKBBQcZ/3QLgM3tM3HBEtxCTs= +github.com/jfrog/build-info-go v1.8.9-0.20230928084830-478bd49f5d3e/go.mod h1:ujJ8XQZMdT2tMkLSMJNyDd1pCY+duwHdjV+9or9FLIg= github.com/jfrog/gofrog v1.3.0 h1:o4zgsBZE4QyDbz2M7D4K6fXPTBJht+8lE87mS9bw7Gk= github.com/jfrog/gofrog v1.3.0/go.mod h1:IFMc+V/yf7rA5WZ74CSbXe+Lgf0iApEQLxRZVzKRUR0= +github.com/jfrog/jfrog-apps-config v1.0.1 h1:mtv6k7g8A8BVhlHGlSveapqf4mJfonwvXYLipdsOFMY= +github.com/jfrog/jfrog-apps-config v1.0.1/go.mod h1:8AIIr1oY9JuH5dylz2S6f8Ym2MaadPLR6noCBO4C22w= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jszwec/csvutil v1.8.0 h1:G7vS2LGdpZZDH1HmHeNbxOaJ/ZnJlpwGFvOkTkJzzNk= @@ -316,8 +319,8 @@ github.com/opencontainers/runc v1.1.5/go.mod h1:1J5XiS+vdZ3wCyZybsuxXZWGrgSr8fFJ github.com/opencontainers/runtime-spec v1.0.3-0.20210326190908-1c3f411f0417/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/selinux v1.10.0/go.mod h1:2i0OySw99QjzBBQByd1Gr9gSjvuho1lHsJxIJ3gGbJI= github.com/owenrumney/go-sarif v1.1.1/go.mod h1:dNDiPlF04ESR/6fHlPyq7gHKmrM0sHUvAGjsoh8ZH0U= -github.com/owenrumney/go-sarif/v2 v2.2.0 h1:1DmZaijK0HBZCR1fgcDSGa7VzYkU9NDmbZ7qC2QfUjE= -github.com/owenrumney/go-sarif/v2 v2.2.0/go.mod 
h1:MSqMMx9WqlBSY7pXoOZWgEsVB4FDNfhcaXDA1j6Sr+w= +github.com/owenrumney/go-sarif/v2 v2.2.2 h1:x2acaiiAW9hu+78wbEYBRGLk5nRtHmkv7HeUsKvblwc= +github.com/owenrumney/go-sarif/v2 v2.2.2/go.mod h1:MSqMMx9WqlBSY7pXoOZWgEsVB4FDNfhcaXDA1j6Sr+w= github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ= github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4= github.com/pierrec/lz4/v4 v4.1.2 h1:qvY3YFXRQE/XB8MlLzJH7mSzBs74eA2gg52YTk6jUPM= @@ -397,8 +400,9 @@ github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+ github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= -github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= diff --git a/scan/cli.go b/scan/cli.go index 33c6df002..e9342f2b9 100644 --- a/scan/cli.go +++ b/scan/cli.go @@ -185,10 +185,10 @@ func AuditCmd(c *cli.Context) error { // On Maven we use '--mvn' flag techExists = c.Bool("mvn") } else { - techExists = c.Bool(tech.ToString()) + techExists = c.Bool(tech.String()) } if 
techExists { - technologies = append(technologies, tech.ToString()) + technologies = append(technologies, tech.String()) } } auditCmd.SetTechnologies(technologies) diff --git a/testdata/xray/jas-config/.jfrog/jfrog-apps-config.yml b/testdata/xray/jas-config/.jfrog/jfrog-apps-config.yml new file mode 100644 index 000000000..b92f76728 --- /dev/null +++ b/testdata/xray/jas-config/.jfrog/jfrog-apps-config.yml @@ -0,0 +1,11 @@ +version: "1.0" + +modules: + - source_root: "." + scanners: + secrets: + exclude_patterns: + - "**/*secret_generic*/**" + iac: + exclude_patterns: + - "**/*gcp*/**" \ No newline at end of file diff --git a/testdata/xray/jas-config/iac/azure/vpc/module.tf b/testdata/xray/jas-config/iac/azure/vpc/module.tf new file mode 100644 index 000000000..9bdbab532 --- /dev/null +++ b/testdata/xray/jas-config/iac/azure/vpc/module.tf @@ -0,0 +1,116 @@ + +#Azure Generic vNet Module +resource "azurerm_resource_group" "network" { + count = var.module_enabled ? 1 : 0 + name = var.short_region != " " ? var.short_region : "${var.deploy_name}-${var.region}" + location = var.region + + tags = { + environment = var.environment + } +} + +resource "azurerm_virtual_network" "vnet" { + count = var.module_enabled ? 1 : 0 + name = "${var.deploy_name}-${var.region}" + location = var.region + address_space = [var.vpc_cidr] + resource_group_name = azurerm_resource_group.network[0].name + + tags = { + environment = var.environment + costcenter = "${var.deploy_name}-${var.region}" + } +} + +resource "azurerm_subnet" "subnet" { + count = var.module_enabled ? length(var.subnet_names) : 0 + name = var.subnet_names[count.index] + virtual_network_name = azurerm_virtual_network.vnet[0].name + resource_group_name = azurerm_resource_group.network[0].name + address_prefixes = [var.subnet_prefixes[count.index]] +# service_endpoints = [ +# "Microsoft.KeyVault" +# ] + + dynamic "delegation"{ + for_each =var.subnet_names[count.index] == "flexible-dbs" ? 
["exec"] : [] + content { + name = "dlg-Microsoft.DBforPostgreSQL-flexibleServers" + service_delegation { + name = "Microsoft.DBforPostgreSQL/flexibleServers" + actions = [ + "Microsoft.Network/virtualNetworks/subnets/join/action" + ] + } + } + } + + enforce_private_link_endpoint_network_policies = var.subnet_names[count.index] == "data" + enforce_private_link_service_network_policies = var.subnet_names[count.index] == "private" && var.enforce_pl_svc_net_private + lifecycle { + ignore_changes = [ + service_endpoints, + delegation[0].name + ] + } +} + + +resource "azurerm_private_dns_zone" "postgres_private_dns" { + count = var.module_enabled ? 1 : 0 + name = "privatelink.postgres.database.azure.com" + resource_group_name = azurerm_resource_group.network[0].name +} + +resource "random_string" "postgres_private_dns_net_link_name" { + count = var.module_enabled ? 1 : 0 + length = 8 + special = false + number = false + upper = false +} + +resource "azurerm_private_dns_zone_virtual_network_link" "postgres_private_dns_net_link" { + count = var.module_enabled ? 1 : 0 + name = random_string.postgres_private_dns_net_link_name[0].result + resource_group_name = azurerm_resource_group.network[0].name + private_dns_zone_name = azurerm_private_dns_zone.postgres_private_dns[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +//resource "azurerm_network_security_group" "nsg" { +// count = "${var.module_enabled ? length(var.subnet_names) : 0}" +// name = "${var.subnet_names[count.index]}-sg" +// location = "${var.region}" +// resource_group_name = "${var.deploy_name}-${var.region}" +//} +// +//resource "azurerm_subnet_network_security_group_association" "nsg" { +// count = "${var.module_enabled ? 
length(var.subnet_names) : 0}" +// subnet_id = "${element(azurerm_subnet.subnet.*.id, count.index)}" +// network_security_group_id = "${element(azurerm_network_security_group.nsg.*.id, count.index)}" +//} +//resource "azurerm_subnet_route_table_association" "nat" { +// count = "${var.module_enabled ? length(var.nat_subnets) : 0}" +// subnet_id = "${element(azurerm_subnet.subnet.*.id, count.index + 1)}" +// route_table_id = "${azurerm_route_table.nattable.id}" +//} +# UDR +//resource "azurerm_route_table" "nattable" { +// count = "${var.module_enabled}" +// name = "${var.deploy_name}-${var.region}" +// location = "${var.region}" +// resource_group_name = "${azurerm_resource_group.network.name}" +// +// route { +// name = "all-traffic-via-nat" +// address_prefix = "0.0.0.0/0" +// next_hop_type = "VirtualAppliance" +// next_hop_in_ip_address = "${var.natgw_private_ip}" +// } +// +// tags = { +// environment = "${var.environment}" +// } +//} diff --git a/testdata/xray/jas-config/iac/azure/vpc/outputs.tf b/testdata/xray/jas-config/iac/azure/vpc/outputs.tf new file mode 100644 index 000000000..62a693aa7 --- /dev/null +++ b/testdata/xray/jas-config/iac/azure/vpc/outputs.tf @@ -0,0 +1,79 @@ +################################################################################## +# OUTPUT +################################################################################## + +output "resource_group_id" { + value = azurerm_resource_group.network[0].id +} + +output "resource_group_name" { + value = azurerm_resource_group.network[0].name +} + +output "vnet_id" { + value = element(concat(azurerm_virtual_network.vnet.*.id, [""]), 0) +} + +output "vnet_location" { + value = element(concat(azurerm_virtual_network.vnet.*.location, [""]), 0) +} + +output "vnet_name" { + value = element(concat(azurerm_virtual_network.vnet.*.name, [""]), 0) +} + +output "private_dns_id" { + value = element( + concat(azurerm_private_dns_zone.postgres_private_dns.*.id, [""]), + 0, + ) +} + +output 
"private_dns_name" { + value = element( + concat(azurerm_private_dns_zone.postgres_private_dns.*.name, [""]), + 0, + ) +} + +//output "vnet_subnets" { +// value = "${azurerm_subnet.subnet.*.id}" +//} + +### subnets ids ### +output "public_subnet" { + value = element(concat(azurerm_subnet.subnet.*.id, [""]), 0) +} + +output "private_subnet" { + value = element(concat(azurerm_subnet.subnet.*.id, [""]), 1) +} +output "flexible_subnet" { + value = element(concat(azurerm_subnet.subnet.*.id, [""]), 4) +} +output "data_subnet" { + value = element(concat(azurerm_subnet.subnet.*.id, [""]), 2) +} + +output "mgmt_subnet" { + value = element(concat(azurerm_subnet.subnet.*.id, [""]), 3) +} + +### subnets names ### +output "public_subnet_name" { + value = element(concat(azurerm_subnet.subnet.*.name, [""]), 0) +} + +output "private_subnet_name" { + value = element(concat(azurerm_subnet.subnet.*.name, [""]), 1) +} + +output "data_subnet_name" { + value = element(concat(azurerm_subnet.subnet.*.name, [""]), 2) +} + +output "mgmt_subnet_name" { + value = element(concat(azurerm_subnet.subnet.*.name, [""]), 3) +} + + diff --git a/testdata/xray/jas-config/iac/azure/vpc/variables.tf b/testdata/xray/jas-config/iac/azure/vpc/variables.tf new file mode 100644 index 000000000..e04fd5f78 --- /dev/null +++ b/testdata/xray/jas-config/iac/azure/vpc/variables.tf @@ -0,0 +1,39 @@ +variable "module_enabled" { + default = true +} + +variable "region" { +} + +variable "deploy_name" { +} + +variable "vpc_cidr" { +} + +variable "short_region" { + default = " " +} + +variable "subnet_prefixes" { + type = list(string) +} + +variable "ssh_source_ranges" { + type = list(string) +} + +variable "environment" { +} + +variable "subnet_names" { + type = list(string) +} + +variable "enforce_pl_svc_net_private" { + default = false +} +//variable "natgw_private_ip" {} +//variable "nat_subnets" { +// type = "list" +//} diff --git a/testdata/xray/jas-config/iac/azure/vpc/versions.tf 
b/testdata/xray/jas-config/iac/azure/vpc/versions.tf new file mode 100644 index 000000000..ac97c6ac8 --- /dev/null +++ b/testdata/xray/jas-config/iac/azure/vpc/versions.tf @@ -0,0 +1,4 @@ + +terraform { + required_version = ">= 0.12" +} diff --git a/testdata/xray/jas-config/iac/azure/vpc_pp/module.tf b/testdata/xray/jas-config/iac/azure/vpc_pp/module.tf new file mode 100644 index 000000000..5caae96c7 --- /dev/null +++ b/testdata/xray/jas-config/iac/azure/vpc_pp/module.tf @@ -0,0 +1,34 @@ + +#Azure Generic vNet Module +resource "azurerm_resource_group" "network" { + count = var.module_enabled ? 1 : 0 + name = var.short_region != " " ? var.short_region : "${var.deploy_name}-${var.region}" + location = var.region + + tags = { + environment = var.environment + } +} + +resource "azurerm_virtual_network" "vnet" { + count = var.module_enabled ? 1 : 0 + name = "${var.deploy_name}-${var.region}" + location = var.region + address_space = [var.vpc_cidr] + resource_group_name = azurerm_resource_group.network[0].name + + tags = { + environment = var.environment + costcenter = "${var.deploy_name}-${var.region}" + } +} + +resource "azurerm_subnet" "subnet" { + count = var.module_enabled ? 
length(var.subnet_names) : 0 + name = var.subnet_names[count.index] + virtual_network_name = azurerm_virtual_network.vnet[0].name + resource_group_name = azurerm_resource_group.network[0].name + address_prefixes = [var.subnet_prefixes[count.index]] + enforce_private_link_endpoint_network_policies = var.subnet_names[count.index] == "private" && var.enforce_private_subnet + +} \ No newline at end of file diff --git a/testdata/xray/jas-config/iac/azure/vpc_pp/outputs.tf b/testdata/xray/jas-config/iac/azure/vpc_pp/outputs.tf new file mode 100644 index 000000000..7e1472580 --- /dev/null +++ b/testdata/xray/jas-config/iac/azure/vpc_pp/outputs.tf @@ -0,0 +1,62 @@ +################################################################################## +# OUTPUT +################################################################################## + +output "resource_group_id" { + value = azurerm_resource_group.network[0].id +} + +output "resource_group_name" { + value = azurerm_resource_group.network[0].name +} + +output "vnet_id" { + value = element(concat(azurerm_virtual_network.vnet.*.id, [""]), 0) +} + +output "vnet_location" { + value = element(concat(azurerm_virtual_network.vnet.*.location, [""]), 0) +} + +output "vnet_name" { + value = element(concat(azurerm_virtual_network.vnet.*.name, [""]), 0) +} + +//output "vnet_subnets" { +// value = "${azurerm_subnet.subnet.*.id}" +//} + +### subnets ids ### +output "public_subnet" { + value = element(concat(azurerm_subnet.subnet.*.id, [""]), 0) +} + +output "private_subnet" { + value = element(concat(azurerm_subnet.subnet.*.id, [""]), 1) +} + +output "data_subnet" { + value = element(concat(azurerm_subnet.subnet.*.id, [""]), 2) +} + +output "mgmt_subnet" { + value = element(concat(azurerm_subnet.subnet.*.id, [""]), 3) +} + +### subnets names ### +output "public_subnet_name" { + value = element(concat(azurerm_subnet.subnet.*.name, [""]), 0) +} + +output "private_subnet_name" { + value = element(concat(azurerm_subnet.subnet.*.name, 
[""]), 1) +} + +output "data_subnet_name" { + value = element(concat(azurerm_subnet.subnet.*.name, [""]), 2) +} + +output "mgmt_subnet_name" { + value = element(concat(azurerm_subnet.subnet.*.name, [""]), 3) +} + diff --git a/testdata/xray/jas-config/iac/azure/vpc_pp/variables.tf b/testdata/xray/jas-config/iac/azure/vpc_pp/variables.tf new file mode 100644 index 000000000..784782aa7 --- /dev/null +++ b/testdata/xray/jas-config/iac/azure/vpc_pp/variables.tf @@ -0,0 +1,40 @@ +variable "module_enabled" { + default = true +} + +variable "region" { +} + +variable "deploy_name" { +} + +variable "vpc_cidr" { +} + +variable "short_region" { + default = " " +} + +variable "subnet_prefixes" { + type = list(string) +} + +variable "ssh_source_ranges" { + type = list(string) +} + +variable "environment" { +} + +variable "subnet_names" { + type = list(string) +} + +variable "enforce_private_subnet" { + default = true +} + +//variable "natgw_private_ip" {} +//variable "nat_subnets" { +// type = "list" +//} diff --git a/testdata/xray/jas-config/iac/azure/vpc_pp/versions.tf b/testdata/xray/jas-config/iac/azure/vpc_pp/versions.tf new file mode 100644 index 000000000..ac97c6ac8 --- /dev/null +++ b/testdata/xray/jas-config/iac/azure/vpc_pp/versions.tf @@ -0,0 +1,4 @@ + +terraform { + required_version = ">= 0.12" +} diff --git a/testdata/xray/jas-config/iac/gcp/k8s-oss/files/chk_k8s_nat b/testdata/xray/jas-config/iac/gcp/k8s-oss/files/chk_k8s_nat new file mode 100644 index 000000000..f4f318bbf --- /dev/null +++ b/testdata/xray/jas-config/iac/gcp/k8s-oss/files/chk_k8s_nat @@ -0,0 +1,17 @@ +#!/bin/bash + +# This script checks the functionality of the NAT gateway VM. 
+ +tf_output=$(terraform output) +k8s_name=$(echo "$tf_output" | grep k8s_cluster_name | cut -d = -f 2) +k8s_project=$(echo "$tf_output" | grep k8s_project | cut -d = -f 2) +k8s_zone=$(echo "$tf_output" | grep k8s_zone | cut -d = -f 2) +k8s_ext_ip=$(echo "$tf_output" | grep external_ip | cut -d = -f 2) + +echo Testing k8s cluster connectivity to NAT.. +gcloud container clusters get-credentials $k8s_name --zone $k8s_zone --project $k8s_project > /dev/null + +chk_ext_ip=$(kubectl run test -it --restart=Never --image=centos:7 -- curl -s http://ipinfo.io/ip) +kubectl delete po test > /dev/null + +echo $k8s_ext_ip $chk_ext_ip diff --git a/testdata/xray/jas-config/iac/gcp/k8s-oss/module.tf b/testdata/xray/jas-config/iac/gcp/k8s-oss/module.tf new file mode 100644 index 000000000..53746b756 --- /dev/null +++ b/testdata/xray/jas-config/iac/gcp/k8s-oss/module.tf @@ -0,0 +1,158 @@ +# Create new K8S cluster with autoscaling + +data "google_container_engine_versions" "region" { + location = var.region +} + +resource "random_string" "admin-password" { + count = var.module_enabled ? 1 : 0 + length = 16 + +// lifecycle { +// ignore_changes = [ +// initial_node_count, master_authorized_networks_config +// ] +// } +} + +# New K8s Cluster, if creation failed you'll need to cleanup manually before running again. +resource "google_container_cluster" "primary" { + count = var.module_enabled ? 1 : 0 + provider = google-beta + name = "${var.deploy_name}-${var.region}" + location = var.k8s_zonal == "" ? var.region : var.region_zone + min_master_version = var.k8s_master_version == "" ? 
data.google_container_engine_versions.region.latest_master_version : var.k8s_master_version + network = var.network + subnetwork = var.subnetwork + logging_service = var.logging_service + monitoring_service = var.monitoring_service + enable_legacy_abac = var.enable_legacy_abac + remove_default_node_pool = "true" + initial_node_count = 1 + enable_shielded_nodes = var.gke_auth.shielded_nodes + enable_intranode_visibility = var.gke_auth.enable_intranode_visibility + + master_auth { + username = var.gke_auth.basic_auth ? "basic-admin" : "" + password = var.gke_auth.basic_auth ? random_string.admin-password[0].result : "" + + client_certificate_config { + issue_client_certificate = var.client_certificate + } + } + + private_cluster_config { + enable_private_endpoint = false + enable_private_nodes = true + master_ipv4_cidr_block = var.subnet_cidr["k8s-private"] + } + + ip_allocation_policy { + cluster_secondary_range_name = "pods-private-range" + services_secondary_range_name = "services-private-range" + } + + # Authoroized networks allowed to access the Master + + master_authorized_networks_config { + cidr_blocks { + cidr_block = "82.81.195.5/32" + display_name = "jfrog-office" + } + cidr_blocks { + cidr_block = "52.8.67.255/32" + display_name = "GlobalVpn" + } + cidr_blocks { + cidr_block = "12.252.18.78/32" + display_name = "US Office HA Public" + } + cidr_blocks { + cidr_block = "52.9.243.19/32" + display_name = "US IT AWS-NATGW" + } + cidr_blocks { + cidr_block = "52.215.237.185/32" + display_name = "EU IT AWS-NATGW" + } + cidr_blocks { + cidr_block = "52.16.203.109/32" + display_name = "GlobalVpn" + } + cidr_blocks { + cidr_block = "146.148.8.199/32" + display_name = "GCP jfrog-dev NAT" + } + cidr_blocks { + cidr_block = "192.168.20.0/24" //should be 192.168.21.0/24 + display_name = "all_local" + } + cidr_blocks { + cidr_block = "${var.natgw_ip[0]}/32" + display_name = "natgw" + } + cidr_blocks { + cidr_block = "${var.natgw_ip[1]}/32" + display_name = "natgw" + } + 
} + lifecycle { + ignore_changes = [ + initial_node_count, master_authorized_networks_config, master_auth + ] + } +} + +# K8s cluster node pool creation +resource "google_container_node_pool" "worker" { + count = var.module_enabled ? 1 : 0 + name = var.override_ng_name == "" ? "${var.deploy_name}-${var.region}-ng-1" : var.override_ng_name + location = var.k8s_zonal == "" ? var.region : var.region_zone + cluster = google_container_cluster.primary[0].name + node_count = 1 + version = var.k8s_node_version == "" ? data.google_container_engine_versions.region.latest_node_version : var.k8s_node_version + + autoscaling { + min_node_count = var.min_node_count + max_node_count = var.max_node_count + } + + management { + auto_repair = lookup(var.node_config, "node_auto_repair") + auto_upgrade = lookup(var.node_config, "node_auto_upgrade" ) + } + + node_config { + machine_type = var.worker_machine_type + image_type = var.image_type + disk_size_gb = var.ng_disk_size_gb + disk_type = "pd-ssd" + + shielded_instance_config { + enable_secure_boot = lookup(var.node_config, "enable_secure_boot" ) + } +// workload_metadata_config { +// node_metadata = "GKE_METADATA_SERVER" +// } +// oauth_scopes = [ +// "https://www.googleapis.com/auth/compute", +// "https://www.googleapis.com/auth/devstorage.read_only", +// "https://www.googleapis.com/auth/logging.write", +// "https://www.googleapis.com/auth/monitoring", +// ] + +// labels = { +// cluster = var.label +// } +// metadata = { +// ssh-keys = "ubuntu:${var.ssh_key} ubuntu" +// disable-legacy-endpoints = "true" +// } + tags = var.instance_tags + } + lifecycle { + ignore_changes = [ + autoscaling.0.max_node_count, node_count + ] + } +} \ No newline at end of file diff --git a/testdata/xray/jas-config/iac/gcp/k8s-oss/outputs.tf b/testdata/xray/jas-config/iac/gcp/k8s-oss/outputs.tf new file mode 100644 index 000000000..7d3cea754 --- /dev/null +++ b/testdata/xray/jas-config/iac/gcp/k8s-oss/outputs.tf @@ -0,0 +1,54 @@ +# The following outputs 
allow authentication and connectivity to the GKE Cluster. +output "client_certificate" { + value = google_container_cluster.primary.*.master_auth.0.client_certificate +} + +output "client_key" { + value = element( + concat( + google_container_cluster.primary.*.master_auth.0.client_key, + [""], + ), + 0, + ) +} + +output "cluster_ca_certificate" { + value = element( + concat( + google_container_cluster.primary.*.master_auth.0.cluster_ca_certificate, + [""], + ), + 0, + ) +} + +output "cluster_name" { + value = element(concat(google_container_cluster.primary.*.name, [""]), 0) +} + +output "cluster_ip" { + value = element(concat(google_container_cluster.primary.*.endpoint, [""]), 0) +} + +output "cluster_username" { + value = element( + concat( + google_container_cluster.primary.*.master_auth.0.username, + [""], + ), + 0, + ) +} + +output "cluster_password" { + value = element( + concat( + google_container_cluster.primary.*.master_auth.0.password, + [""], + ), + 0, + ) + sensitive = true +} + diff --git a/testdata/xray/jas-config/iac/gcp/k8s-oss/variables.tf b/testdata/xray/jas-config/iac/gcp/k8s-oss/variables.tf new file mode 100644 index 000000000..1124605c7 --- /dev/null +++ b/testdata/xray/jas-config/iac/gcp/k8s-oss/variables.tf @@ -0,0 +1,102 @@ +variable "module_enabled" { + default = true +} + +variable "project_name" { +} + +variable "region" { +} + +variable "region_zone" { +} + +variable "deploy_name" { +} + +variable "network" { +} + +variable "subnetwork" { +} + +variable "instance_tags" { + type = list(string) +} + +variable "subnet_cidr" { + type = map(string) +} + +variable "min_node_count" { +} + +variable "max_node_count" { +} + +variable "logging_service" { +} + +variable "monitoring_service" { +} + +variable "enable_legacy_abac" { +} + +variable "worker_machine_type" { +} + +variable "ft_machine_type" { +} + +variable "image_type" { +} + +variable "ng_disk_size_gb" { +} +variable "ft_disk_size_gb" { +} + +variable "label" { +} + +variable "natgw_ip" 
{ +} + +variable "gcp_azs" { + type = map(string) + default = { + us-east1 = "us-east1-c,us-east1-d" + us-west1 = "us-west1-c,us-west1-a" + us-central1 = "us-central1-c,us-central1-f" + europe-west2 = "europe-west2-a,europe-west2-c" + europe-west1 = "europe-west1-c,europe-west1-d" + } +} + +variable "ssh_key" { +} + +variable "k8s_master_version" { +} + +variable "k8s_node_version" { +} + +variable "client_certificate" { +} + +variable "k8s_zonal" { +} + +variable "override_ft_name" { +} + +variable "override_ng_name" { +} + +variable "gke_auth" { +} + +variable "node_config" { +} \ No newline at end of file diff --git a/testdata/xray/jas-config/iac/gcp/k8s-oss/versions.tf b/testdata/xray/jas-config/iac/gcp/k8s-oss/versions.tf new file mode 100644 index 000000000..ac97c6ac8 --- /dev/null +++ b/testdata/xray/jas-config/iac/gcp/k8s-oss/versions.tf @@ -0,0 +1,4 @@ + +terraform { + required_version = ">= 0.12" +} diff --git a/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/files/chk_k8s_nat b/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/files/chk_k8s_nat new file mode 100644 index 000000000..f4f318bbf --- /dev/null +++ b/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/files/chk_k8s_nat @@ -0,0 +1,17 @@ +#!/bin/bash + +# This script checks the functionality of the NAT gateway VM. + +tf_output=$(terraform output) +k8s_name=$(echo "$tf_output" | grep k8s_cluster_name | cut -d = -f 2) +k8s_project=$(echo "$tf_output" | grep k8s_project | cut -d = -f 2) +k8s_zone=$(echo "$tf_output" | grep k8s_zone | cut -d = -f 2) +k8s_ext_ip=$(echo "$tf_output" | grep external_ip | cut -d = -f 2) + +echo Testing k8s cluster connectivity to NAT.. 
+gcloud container clusters get-credentials $k8s_name --zone $k8s_zone --project $k8s_project > /dev/null + +chk_ext_ip=$(kubectl run test -it --restart=Never --image=centos:7 -- curl -s http://ipinfo.io/ip) +kubectl delete po test > /dev/null + +echo $k8s_ext_ip $chk_ext_ip diff --git a/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/module.tf b/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/module.tf new file mode 100644 index 000000000..f2d68663c --- /dev/null +++ b/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/module.tf @@ -0,0 +1,207 @@ +# Create new K8S cluster with autoscaling + +data "google_container_engine_versions" "region" { + location = var.region +} + +resource "random_string" "admin-password" { + count = var.module_enabled ? 1 : 0 + length = 16 + +// lifecycle { +// ignore_changes = [ +// initial_node_count, master_authorized_networks_config +// ] +// } +} + +# New K8s Cluster, if creation failed you'll need to cleanup manually before running again. +resource "google_container_cluster" "primary" { + count = var.module_enabled ? 1 : 0 + provider = google-beta + name = "${var.deploy_name}-${var.region}" + location = var.k8s_zonal == "" ? var.region : var.region_zone + min_master_version = var.k8s_master_version == "" ? data.google_container_engine_versions.region.latest_master_version :lookup(var.gke_map.override,"k8s_master_version") + network = var.network + subnetwork = var.subnetwork + logging_service = var.logging_service + monitoring_service = var.monitoring_service + enable_legacy_abac = var.enable_legacy_abac + remove_default_node_pool = "true" + initial_node_count = 1 + enable_shielded_nodes = var.gke_auth.shielded_nodes + enable_intranode_visibility = var.enable_intranode_visibility + + master_auth { + username = var.gke_auth.basic_auth ? "basic-admin" : "" + password = var.gke_auth.basic_auth ? 
random_string.admin-password[0].result : "" + + client_certificate_config { + issue_client_certificate = var.client_certificate + } + } + + private_cluster_config { + enable_private_endpoint = false + enable_private_nodes = true + master_ipv4_cidr_block = var.subnet_cidr["k8s-private"] + } + + ip_allocation_policy { + cluster_secondary_range_name = "pods-private-range" + services_secondary_range_name = "services-private-range" + } + maintenance_policy { + recurring_window { + recurrence = var.maintenance_window.recurrence + start_time = var.maintenance_window.start_time + end_time = var.maintenance_window.end_time + } + } + + # Authoroized networks allowed to access the Master + + # master_authorized_networks_config { + + # cidr_blocks { + # cidr_block = "${var.natgw_ip[0]}/32" + # display_name = "natgw" + # } + # cidr_blocks { + # cidr_block = "${var.natgw_ip[1]}/32" + # display_name = "natgw" + # } + # } + master_authorized_networks_config { + dynamic "cidr_blocks" { + for_each = var.gke_map.override["public_access_cidrs"] + iterator = authorized_network + content { + cidr_block = authorized_network.value.cidr_block + display_name = authorized_network.value.display_name + } + } + } + + dynamic "resource_usage_export_config" { + for_each = toset(var.resource_usage_export_config_parameters != null ? ["exec"] : []) + content { + enable_network_egress_metering = lookup(var.resource_usage_export_config_parameters, "enable_network_egress_metering") + enable_resource_consumption_metering = lookup(var.resource_usage_export_config_parameters, "enable_resource_consumption_metering") + bigquery_destination { + dataset_id = lookup(var.resource_usage_export_config_parameters, "bigquery_destination.dataset_id") + } + } + } + + lifecycle { + ignore_changes = [ + initial_node_count, master_auth + ] + } +} + +# K8s cluster node pool creation +resource "google_container_node_pool" "worker" { + count = contains(keys(var.gke_map),"ng") ? 
1 : 0 + name = lookup(var.gke_map.ng, "name", "${var.deploy_name}-${var.region}-ng-1" ) + location = var.k8s_zonal == "" ? var.region : var.region_zone + cluster = google_container_cluster.primary[0].name + node_count = 1 + version = lookup(var.gke_map.override, "k8s_node_version", data.google_container_engine_versions.region.latest_node_version) + +dynamic "autoscaling" { + for_each = toset(var.autoscaling_parameters != null ? ["exec"] : []) + content { + min_node_count = lookup(var.autoscaling_parameters, "min_node_count") + max_node_count = lookup(var.autoscaling_parameters, "max_node_count") + } + } + management { + auto_repair = lookup(var.node_config, "node_auto_repair") + auto_upgrade = lookup(var.node_config, "node_auto_upgrade" ) + } + node_config { + machine_type = lookup(var.gke_map.ng, "instance_type", "n2-highmem-2") + image_type = lookup(var.gke_map.ng, "image_type","COS") + disk_size_gb = lookup(var.gke_map.ng, "disk_size", "2000") + disk_type = "pd-ssd" + oauth_scopes = var.oauth_scopes + + shielded_instance_config { + enable_secure_boot = lookup(var.node_config, "enable_secure_boot" ) + } +// workload_metadata_config { +// node_metadata = "GKE_METADATA_SERVER" +// } + + +// labels = { +// cluster = var.label +// } + metadata = { + ssh-keys = "${var.ssh_key}" + disable-legacy-endpoints = "true" + } + tags = var.instance_tags + } + lifecycle { + ignore_changes = [ + autoscaling.0.max_node_count, node_count + ] + } +} +###node group for devops### +resource "google_container_node_pool" "devops_nodegroup" { + count = contains(keys(var.gke_map),"devops") ? 1 : 0 + name = lookup(var.gke_map.devops, "name", "${var.deploy_name}-${var.region}-ng-1" ) + location = var.k8s_zonal == "" ? 
var.region : var.region_zone + cluster = google_container_cluster.primary[0].name + node_count = 1 + version = lookup(var.gke_map.override, "k8s_node_version", data.google_container_engine_versions.region.latest_node_version) + management { + auto_repair = lookup(var.node_config, "node_auto_repair") + auto_upgrade = lookup(var.node_config, "node_auto_upgrade" ) + } + +dynamic "autoscaling" { + for_each = toset(var.gke_map.devops["autoscaling_parameters"] != {} ? ["exec"] : []) + content { + min_node_count = lookup(var.gke_map.devops["autoscaling_parameters"], "min_node_count") + max_node_count = lookup(var.gke_map.devops["autoscaling_parameters"], "max_node_count") + } + } + + node_config { + machine_type = lookup(var.gke_map.devops, "instance_type", "n2-standard-2") + labels = { + "k8s.jfrog.com/pool_type" = "devops" + } + image_type = var.image_type + disk_size_gb = lookup(var.gke_map.devops, "disk_size", "2000") + disk_type = "pd-ssd" + oauth_scopes= var.oauth_scopes + taint { + effect = "NO_SCHEDULE" + key = "pool_type" + value = "devops" + } + shielded_instance_config { + enable_secure_boot = lookup(var.node_config, "enable_secure_boot" ) + } +// workload_metadata_config { +// node_metadata = "GKE_METADATA_SERVER" +// } + + metadata = { + ssh-keys = "ubuntu:${var.ssh_key} ubuntu" + disable-legacy-endpoints = "true" + } + tags = var.instance_tags + } + lifecycle { + ignore_changes = [ + autoscaling.0.max_node_count, node_count + ] + } +} \ No newline at end of file diff --git a/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/outputs.tf b/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/outputs.tf new file mode 100644 index 000000000..7d3cea754 --- /dev/null +++ b/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/outputs.tf @@ -0,0 +1,54 @@ +# The following outputs allow authentication and connectivity to the GKE Cluster. 
+output "client_certificate" { + value = google_container_cluster.primary.*.master_auth.0.client_certificate +} + +output "client_key" { + value = element( + concat( + google_container_cluster.primary.*.master_auth.0.client_key, + [""], + ), + 0, + ) +} + +output "cluster_ca_certificate" { + value = element( + concat( + google_container_cluster.primary.*.master_auth.0.cluster_ca_certificate, + [""], + ), + 0, + ) +} + +output "cluster_name" { + value = element(concat(google_container_cluster.primary.*.name, [""]), 0) +} + +output "cluster_ip" { + value = element(concat(google_container_cluster.primary.*.endpoint, [""]), 0) +} + +output "cluster_username" { + value = element( + concat( + google_container_cluster.primary.*.master_auth.0.username, + [""], + ), + 0, + ) +} + +output "cluster_password" { + value = element( + concat( + google_container_cluster.primary.*.master_auth.0.password, + [""], + ), + 0, + ) + sensitive = true +} + diff --git a/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/rbac.tf b/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/rbac.tf new file mode 100644 index 000000000..2b2c06dc2 --- /dev/null +++ b/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/rbac.tf @@ -0,0 +1,61 @@ +provider "kubernetes" { + host = try(var.gke_map.override["k8s_sdm"], "https://${google_container_cluster.primary.*.endpoint}") +} + +resource "kubernetes_cluster_role_binding" "sdm-roles" { +for_each = toset(var.rbac_admin_roles) + metadata { + name = "${each.value}" + } + role_ref { + api_group = "rbac.authorization.k8s.io" + kind = "ClusterRole" + name = "cluster-admin" + } + subject { + kind = "Group" + name = "${each.value}" + api_group = "rbac.authorization.k8s.io" + } +} + + +resource "kubernetes_cluster_role" "sdm-ro-roles" { + for_each = toset(var.rbac_readonly_roles) + metadata { + name = "${each.value}" + } + + rule { + api_groups = [""] + resources = ["*"] + verbs = ["get", "list", "watch"] + } + rule { + api_groups = ["extensions"] + resources = ["*"] + 
verbs = ["get", "list", "watch"] + } + rule { + api_groups = ["apps"] + resources = ["*"] + verbs = ["get", "list", "watch"] + } +} + +resource "kubernetes_cluster_role_binding" "sdm-ro-roles" { + for_each = toset(var.rbac_readonly_roles) + metadata { + name = "${each.value}" + } + role_ref { + api_group = "rbac.authorization.k8s.io" + kind = "ClusterRole" + name = "${each.value}" + } + subject { + kind = "Group" + name = "${each.value}" + api_group = "rbac.authorization.k8s.io" + } +} \ No newline at end of file diff --git a/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/variables.tf b/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/variables.tf new file mode 100644 index 000000000..f025f6049 --- /dev/null +++ b/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/variables.tf @@ -0,0 +1,137 @@ +variable "module_enabled" { + default = true +} + +variable "project_name" { +} + +variable "region" { +} + +variable "region_zone" { +} + +variable "deploy_name" { +} + +variable "network" { +} +variable "subnetwork" { +} + +variable "instance_tags" { + type = list(string) +} + +variable "subnet_cidr" { + type = map(string) +} + +variable "min_node_count" { +} + +variable "max_node_count" { +} + +variable "logging_service" { +} + +variable "monitoring_service" { +} + +variable "enable_legacy_abac" { +} + +variable "worker_machine_type" { +} + +variable "ft_machine_type" { +} + +variable "image_type" { +} + +variable "ng_disk_size_gb" { +} +variable "ft_disk_size_gb" { +} + +variable "label" { +} + +variable "natgw_ip" { +} + +variable "gcp_azs" { + type = map(string) + default = { + us-east1 = "us-east1-c,us-east1-d" + us-west1 = "us-west1-c,us-west1-a" + us-central1 = "us-central1-c,us-central1-f" + europe-west2 = "europe-west2-a,europe-west2-c" + europe-west1 = "europe-west1-c,europe-west1-d" + } +} + +variable "ssh_key" { +} + +variable "k8s_master_version" { +} + +variable "k8s_node_version" { +} + +variable "client_certificate" { +} + +variable "k8s_zonal" { +} + 
+variable "override_ft_name" { +} + +variable "override_ng_name" { +} + +variable "autoscaling_parameters"{ +} + +variable "gke_map"{ + +} +variable "network_policy" { + default = false +} +variable "maintenance_window" { + default = { + recurrence = "FREQ=WEEKLY;BYDAY=SU" + start_time = "2021-11-21T01:00:00Z" + end_time = "2021-11-21T18:00:00Z" + } +} + +variable "gke_auth" { +} + +variable "oauth_scopes" { + default = [ + "https://www.googleapis.com/auth/logging.write", + "https://www.googleapis.com/auth/monitoring", + ] +} + +variable "node_config"{ +} +variable "enable_intranode_visibility" {} +variable "rbac_admin_roles"{ +default = [] +} + +variable "rbac_readonly_roles"{ +default = [] +} + +variable "resource_usage_export_config_parameters" { + default = null +} \ No newline at end of file diff --git a/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/versions.tf b/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/versions.tf new file mode 100644 index 000000000..ac97c6ac8 --- /dev/null +++ b/testdata/xray/jas-config/iac/gcp/k8s-pipelines-bp/versions.tf @@ -0,0 +1,4 @@ + +terraform { + required_version = ">= 0.12" +} diff --git a/testdata/xray/jas-config/main.py b/testdata/xray/jas-config/main.py new file mode 100644 index 000000000..064571e46 --- /dev/null +++ b/testdata/xray/jas-config/main.py @@ -0,0 +1,5 @@ +import yaml + +with open('example.yaml') as f: + data = yaml.full_load(f) + print(data) \ No newline at end of file diff --git a/testdata/xray/jas-config/requirements.txt b/testdata/xray/jas-config/requirements.txt new file mode 100644 index 000000000..79bfd143b --- /dev/null +++ b/testdata/xray/jas-config/requirements.txt @@ -0,0 +1,2 @@ +PyYAML==5.2 +Werkzeug==1.0.1 \ No newline at end of file diff --git a/testdata/xray/jas-config/sast/flask_webgoat/__init__.py b/testdata/xray/jas-config/sast/flask_webgoat/__init__.py new file mode 100644 index 000000000..9e2f505a6 --- /dev/null +++ b/testdata/xray/jas-config/sast/flask_webgoat/__init__.py @@ -0,0 
+1,51 @@ +import os +import sqlite3 +from pathlib import Path + +from flask import Flask, g + +DB_FILENAME = "database.db" + + +def query_db(query, args=(), one=False, commit=False): + with sqlite3.connect(DB_FILENAME) as conn: + # vulnerability: Sensitive Data Exposure + conn.set_trace_callback(print) + cur = conn.cursor().execute(query, args) + if commit: + conn.commit() + return cur.fetchone() if one else cur.fetchall() + + +def create_app(): + app = Flask(__name__) + app.secret_key = "aeZ1iwoh2ree2mo0Eereireong4baitixaixu5Ee" + + db_path = Path(DB_FILENAME) + if db_path.exists(): + db_path.unlink() + + conn = sqlite3.connect(DB_FILENAME) + create_table_query = """CREATE TABLE IF NOT EXISTS user + (id INTEGER PRIMARY KEY, username TEXT, password TEXT, access_level INTEGER)""" + conn.execute(create_table_query) + + insert_admin_query = """INSERT INTO user (id, username, password, access_level) + VALUES (1, 'admin', 'admin', 0)""" + conn.execute(insert_admin_query) + conn.commit() + conn.close() + + with app.app_context(): + from . import actions + from . import auth + from . import status + from . import ui + from . import users + + app.register_blueprint(actions.bp) + app.register_blueprint(auth.bp) + app.register_blueprint(status.bp) + app.register_blueprint(ui.bp) + app.register_blueprint(users.bp) + return app diff --git a/testdata/xray/jas-config/sast/flask_webgoat/ui.py b/testdata/xray/jas-config/sast/flask_webgoat/ui.py new file mode 100644 index 000000000..2b0bd0608 --- /dev/null +++ b/testdata/xray/jas-config/sast/flask_webgoat/ui.py @@ -0,0 +1,25 @@ +import sqlite3 + +from flask import Blueprint, request, render_template +from . 
import query_db + +bp = Blueprint("ui", __name__) + + +@bp.route("/search") +def search(): + query_param = request.args.get("query") + if query_param is None: + message = "please provide the query parameter" + return render_template("error.html", message=message) + + try: + query = "SELECT username, access_level FROM user WHERE username LIKE ?;" + results = query_db(query, (query_param,)) + # vulnerability: XSS + return render_template( + "search.html", results=results, num_results=len(results), query=query_param + ) + except sqlite3.Error as err: + message = "Error while executing query " + query_param + ": " + err + return render_template("error.html", message=message) diff --git a/testdata/xray/jas-config/sast/result.sarif b/testdata/xray/jas-config/sast/result.sarif new file mode 100644 index 000000000..839f34816 --- /dev/null +++ b/testdata/xray/jas-config/sast/result.sarif @@ -0,0 +1,618 @@ +{ + "runs": [ + { + "tool": { + "driver": { + "name": "USAF", + "rules": [ + { + "id": "python-flask-debug", + "defaultConfiguration": { + "parameters": { + "properties": { + "CWE": "1295" + } + } + }, + "fullDescription": { + "text": "\n### Overview\nDebug mode in a Flask app is a feature that allows the developer to see detailed\nerror messages and tracebacks when an error occurs. This can be useful for debugging\nand troubleshooting, but it can also create a security vulnerability if the app is\ndeployed in debug mode. 
In debug mode, Flask will display detailed error messages and\ntracebacks to the user, even if the error is caused by malicious input.\nThis can provide attackers with valuable information about the app's internal workings\nand vulnerabilities, making it easier for them to exploit those vulnerabilities.\n\n### Query operation\nIn this query we look Flask applications that set the `debug` argument to `True`\n\n### Vulnerable example\n```python\nfrom flask import Flask\n\napp = Flask(__name__)\n\n@app.route('/')\ndef hello():\n return 'Hello, World!'\n\nif __name__ == '__main__':\n app.run(debug=True)\n```\nIn this example, the Flask application is set to run in debug mode by passing\n`debug=True` as an argument to the `app.run()` function. This will make the application\nemit potentially sensitive information to the users.\n\n### Remediation\nWhen using `app.run`, omit the `debug` flag or set it to `False` -\n```diff\nif __name__ == '__main__':\n- app.run(debug=True)\n+ app.run()\n```\n", + "markdown": "\n### Overview\nDebug mode in a Flask app is a feature that allows the developer to see detailed\nerror messages and tracebacks when an error occurs. This can be useful for debugging\nand troubleshooting, but it can also create a security vulnerability if the app is\ndeployed in debug mode. 
In debug mode, Flask will display detailed error messages and\ntracebacks to the user, even if the error is caused by malicious input.\nThis can provide attackers with valuable information about the app's internal workings\nand vulnerabilities, making it easier for them to exploit those vulnerabilities.\n\n### Query operation\nIn this query we look Flask applications that set the `debug` argument to `True`\n\n### Vulnerable example\n```python\nfrom flask import Flask\n\napp = Flask(__name__)\n\n@app.route('/')\ndef hello():\n return 'Hello, World!'\n\nif __name__ == '__main__':\n app.run(debug=True)\n```\nIn this example, the Flask application is set to run in debug mode by passing\n`debug=True` as an argument to the `app.run()` function. This will make the application\nemit potentially sensitive information to the users.\n\n### Remediation\nWhen using `app.run`, omit the `debug` flag or set it to `False` -\n```diff\nif __name__ == '__main__':\n- app.run(debug=True)\n+ app.run()\n```\n" + }, + "shortDescription": { + "text": "Flask Running in Debug" + } + }, + { + "id": "python-stack-trace-exposure", + "defaultConfiguration": { + "parameters": { + "properties": { + "CWE": "209" + } + } + }, + "fullDescription": { + "text": "\n### Overview\nStack trace exposure is a type of security vulnerability that occurs when a program reveals\nsensitive information, such as the names and locations of internal files and variables,\nin error messages or other diagnostic output. This can happen when a program crashes or\nencounters an error, and the stack trace (a record of the program's call stack at the time\nof the error) is included in the output. 
Stack trace exposure can provide attackers with\nvaluable information about a program's internal workings and vulnerabilities, making it\neasier for them to exploit those vulnerabilities and gain unauthorized access\nto the system.\n\n### Query operation\nIn this query we look for any stack trace information flowing into the output.\n\n### Vulnerable example\n```python\nimport traceback\n\ndef my_function():\n try:\n # Some code that may raise an exception\n raise ValueError('Something went wrong')\n except ValueError as e:\n traceback.print_tb(e.__traceback__)\n\nmy_function()\n```\nIn this example, the `my_function()` function intentionally raises\na `ValueError` exception.\nThe `traceback.print_tb()` function is then used to print the stack trace\nwhen the exception is caught. The vulnerability lies in using `traceback.print_tb()`\nto output the stack trace directly to the console or any other output stream.\nIf this code were part of a web application or exposed through an API,\nthe stack trace would be exposed in the server logs or potentially returned\nas part of an error response to the client.\n\n### Remediation\nLog the exception to a logging framework or file, instead of outputting directly to the\nconsole-\n\n```python\ndef log_exception(exception):\n logging.exception('An exception occurred', exc_info=exception)\n```\n\n```diff\ndef my_function():\n try:\n # Some code that may raise an exception\n raise ValueError('Something went wrong')\n except ValueError as e:\n- traceback.print_tb(e.__traceback__)\n+ log_exception(e)\n```\n", + "markdown": "\n### Overview\nStack trace exposure is a type of security vulnerability that occurs when a program reveals\nsensitive information, such as the names and locations of internal files and variables,\nin error messages or other diagnostic output. 
This can happen when a program crashes or\nencounters an error, and the stack trace (a record of the program's call stack at the time\nof the error) is included in the output. Stack trace exposure can provide attackers with\nvaluable information about a program's internal workings and vulnerabilities, making it\neasier for them to exploit those vulnerabilities and gain unauthorized access\nto the system.\n\n### Query operation\nIn this query we look for any stack trace information flowing into the output.\n\n### Vulnerable example\n```python\nimport traceback\n\ndef my_function():\n try:\n # Some code that may raise an exception\n raise ValueError('Something went wrong')\n except ValueError as e:\n traceback.print_tb(e.__traceback__)\n\nmy_function()\n```\nIn this example, the `my_function()` function intentionally raises\na `ValueError` exception.\nThe `traceback.print_tb()` function is then used to print the stack trace\nwhen the exception is caught. The vulnerability lies in using `traceback.print_tb()`\nto output the stack trace directly to the console or any other output stream.\nIf this code were part of a web application or exposed through an API,\nthe stack trace would be exposed in the server logs or potentially returned\nas part of an error response to the client.\n\n### Remediation\nLog the exception to a logging framework or file, instead of outputting directly to the\nconsole-\n\n```python\ndef log_exception(exception):\n logging.exception('An exception occurred', exc_info=exception)\n```\n\n```diff\ndef my_function():\n try:\n # Some code that may raise an exception\n raise ValueError('Something went wrong')\n except ValueError as e:\n- traceback.print_tb(e.__traceback__)\n+ log_exception(e)\n```\n" + }, + "shortDescription": { + "text": "Stack Trace Exposure" + } + }, + { + "id": "python-xss", + "defaultConfiguration": { + "parameters": { + "properties": { + "CWE": "79" + } + } + }, + "fullDescription": { + "text": "\n### Overview\nXSS, or Cross-Site 
Scripting, is a type of vulnerability that allows an attacker to\ninject malicious code into a website or web application.\nThis can allow the attacker to steal sensitive information from users, such as their\ncookies or login credentials, or to perform unauthorized actions on their behalf.\n\n### Query operation\nIn the query we look for any user input that flows into\na potential output of the application.\n\n### Vulnerable example\nIn the following example, the Flask application takes a user-supplied parameter (`name`)\nfrom the query string and renders it directly into an HTML template using the\n`render_template_string` function. The issue is that\nthe user input is not properly sanitized or escaped, making it vulnerable to XSS attacks.\n```python\nfrom flask import Flask, request, render_template_string\n\napp = Flask(__name__)\n\n@app.route('/')\ndef index():\n name = request.args.get('name', 'Guest')\n message = f'Hello, {name}!'\n return render_template_string('

{}

'.format(message))\n\nif __name__ == '__main__':\napp.run()\n```\nAn attacker can exploit this vulnerability by injecting malicious JavaScript code into the\n`name` parameter. For instance, they could modify the URL to include the following payload:\n`http://localhost:5000/?name=`\n\n### Remediation\nWhen rendering templates, use parametrized variable assignments (which are automatically\nescaped) instead of direct string manipulation -\n```diff\n@app.route('/')\ndef index():\n name = request.args.get('name', 'Guest')\n message = f'Hello, {name}!'\n- return render_template_string('

{}

'.format(message))\n+ return render_template_string('

{{ message }}

', message=message)\n```\n", + "markdown": "\n### Overview\nXSS, or Cross-Site Scripting, is a type of vulnerability that allows an attacker to\ninject malicious code into a website or web application.\nThis can allow the attacker to steal sensitive information from users, such as their\ncookies or login credentials, or to perform unauthorized actions on their behalf.\n\n### Query operation\nIn the query we look for any user input that flows into\na potential output of the application.\n\n### Vulnerable example\nIn the following example, the Flask application takes a user-supplied parameter (`name`)\nfrom the query string and renders it directly into an HTML template using the\n`render_template_string` function. The issue is that\nthe user input is not properly sanitized or escaped, making it vulnerable to XSS attacks.\n```python\nfrom flask import Flask, request, render_template_string\n\napp = Flask(__name__)\n\n@app.route('/')\ndef index():\n name = request.args.get('name', 'Guest')\n message = f'Hello, {name}!'\n return render_template_string('

{}

'.format(message))\n\nif __name__ == '__main__':\napp.run()\n```\nAn attacker can exploit this vulnerability by injecting malicious JavaScript code into the\n`name` parameter. For instance, they could modify the URL to include the following payload:\n`http://localhost:5000/?name=`\n\n### Remediation\nWhen rendering templates, use parametrized variable assignments (which are automatically\nescaped) instead of direct string manipulation -\n```diff\n@app.route('/')\ndef index():\n name = request.args.get('name', 'Guest')\n message = f'Hello, {name}!'\n- return render_template_string('

{}

'.format(message))\n+ return render_template_string('

{{ message }}

', message=message)\n```\n" + }, + "shortDescription": { + "text": "XSS Vulnerability" + } + } + ] + } + }, + "invocations": [ + { + "executionSuccessful": true, + "arguments": [ + "/Users/assafa/.jfrog/dependencies/analyzerManager/zd_scanner/scanner", + "scan", + "/var/folders/xv/th4cksxn7jv9wjrdnn1h4tj00000gq/T/jfrog.cli.temp.-1693492973-1963413933/results.sarif" + ], + "workingDirectory": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast" + } + } + ], + "results": [ + { + "message": { + "text": "Stack Trace Exposure" + }, + "level": "note", + "locations": [ + { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.__init__.query_db" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/__init__.py" + }, + "region": { + "endColumn": 39, + "endLine": 13, + "snippet": { + "text": "conn.set_trace_callback(print)" + }, + "startColumn": 9, + "startLine": 13 + } + } + } + ], + "ruleId": "python-stack-trace-exposure" + }, + { + "message": { + "text": "Stack Trace Exposure" + }, + "level": "note", + "locations": [ + { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.__init__.query_db" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/__init__.py" + }, + "region": { + "endColumn": 39, + "endLine": 13, + "snippet": { + "text": "conn.set_trace_callback(print)" + }, + "startColumn": 9, + "startLine": 13 + } + } + } + ], + "ruleId": "python-stack-trace-exposure" + }, + { + "message": { + "text": "XSS Vulnerability" + }, + "codeFlows": [ + { + "threadFlows": [ + { + "locations": [ + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": 
"file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 31, + "endLine": 11, + "snippet": { + "text": "request.args" + }, + "startColumn": 19, + "startLine": 11 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 35, + "endLine": 11, + "snippet": { + "text": "request.args.get" + }, + "startColumn": 19, + "startLine": 11 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 44, + "endLine": 11, + "snippet": { + "text": "request.args.get(\"query\")" + }, + "startColumn": 19, + "startLine": 11 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 16, + "endLine": 11, + "snippet": { + "text": "query_param" + }, + "startColumn": 5, + "startLine": 11 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 10, + "endLine": 22, + "snippet": { + "text": "render_template(\n \"search.html\", results=results, num_results=len(results), query=query_param\n )" 
+ }, + "startColumn": 16, + "startLine": 20 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 10, + "endLine": 22, + "snippet": { + "text": "return render_template(\n \"search.html\", results=results, num_results=len(results), query=query_param\n )" + }, + "startColumn": 9, + "startLine": 20 + } + } + } + } + ] + } + ] + } + ], + "level": "error", + "locations": [ + { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 10, + "endLine": 22, + "snippet": { + "text": "return render_template(\n \"search.html\", results=results, num_results=len(results), query=query_param\n )" + }, + "startColumn": 9, + "startLine": 20 + } + } + } + ], + "ruleId": "python-xss" + }, + { + "message": { + "text": "XSS Vulnerability" + }, + "codeFlows": [ + { + "threadFlows": [ + { + "locations": [ + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 31, + "endLine": 11, + "snippet": { + "text": "request.args" + }, + "startColumn": 19, + "startLine": 11 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + 
"endColumn": 35, + "endLine": 11, + "snippet": { + "text": "request.args.get" + }, + "startColumn": 19, + "startLine": 11 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 44, + "endLine": 11, + "snippet": { + "text": "request.args.get(\"query\")" + }, + "startColumn": 19, + "startLine": 11 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 16, + "endLine": 11, + "snippet": { + "text": "query_param" + }, + "startColumn": 5, + "startLine": 11 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 63, + "endLine": 24, + "snippet": { + "text": "\"Error while executing query \" + query_param" + }, + "startColumn": 19, + "startLine": 24 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 70, + "endLine": 24, + "snippet": { + "text": "\"Error while executing query \" + query_param + \": \"" + }, + "startColumn": 19, + "startLine": 24 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": 
"flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 76, + "endLine": 24, + "snippet": { + "text": "\"Error while executing query \" + query_param + \": \" + err" + }, + "startColumn": 19, + "startLine": 24 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 16, + "endLine": 24, + "snippet": { + "text": "message" + }, + "startColumn": 9, + "startLine": 24 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 62, + "endLine": 25, + "snippet": { + "text": "render_template(\"error.html\", message=message)" + }, + "startColumn": 16, + "startLine": 25 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 62, + "endLine": 25, + "snippet": { + "text": "return render_template(\"error.html\", message=message)" + }, + "startColumn": 9, + "startLine": 25 + } + } + } + } + ] + } + ] + } + ], + "level": "error", + "locations": [ + { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": 
"file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 62, + "endLine": 25, + "snippet": { + "text": "return render_template(\"error.html\", message=message)" + }, + "startColumn": 9, + "startLine": 25 + } + } + } + ], + "ruleId": "python-xss" + }, + { + "message": { + "text": "Flask Running in Debug" + }, + "locations": [ + { + "logicalLocations": [ + { + "fullyQualifiedName": "run" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/run.py" + }, + "region": { + "endColumn": 24, + "endLine": 15, + "snippet": { + "text": "app.run(debug=True)" + }, + "startColumn": 5, + "startLine": 15 + } + } + } + ], + "ruleId": "python-flask-debug" + } + ] + } + ], + "version": "2.1.0", + "$schema": "https://docs.oasis-open.org/sarif/sarif/v2.1.0/cos02/schemas/sarif-schema-2.1.0.json" +} \ No newline at end of file diff --git a/testdata/xray/jas-config/sast/run.py b/testdata/xray/jas-config/sast/run.py new file mode 100644 index 000000000..8cacc71d4 --- /dev/null +++ b/testdata/xray/jas-config/sast/run.py @@ -0,0 +1,15 @@ +from flask_webgoat import create_app + +app = create_app() + +@app.after_request +def add_csp_headers(response): + # vulnerability: Broken Access Control + response.headers['Access-Control-Allow-Origin'] = '*' + # vulnerability: Security Misconfiguration + response.headers['Content-Security-Policy'] = "script-src 'self' 'unsafe-inline'" + return response + +if __name__ == '__main__': + # vulnerability: Security Misconfiguration + app.run(debug=True) diff --git a/testdata/xray/jas-config/secrets/more_secrets/key b/testdata/xray/jas-config/secrets/more_secrets/key new file mode 100644 index 000000000..20edd3206 --- /dev/null +++ b/testdata/xray/jas-config/secrets/more_secrets/key @@ -0,0 +1,7 @@ +key:S3cr3t_K +Key:S3cr3t_K +key2:S3cr3t_K +search-key:S3cr3t_K +public-key:S3cr3t_K 
+publicKey:S3cr3t_K +KEY_public-key:S3cr3t_K diff --git a/testdata/xray/jas-config/secrets/more_secrets/sequence b/testdata/xray/jas-config/secrets/more_secrets/sequence new file mode 100644 index 000000000..1137d2d9e --- /dev/null +++ b/testdata/xray/jas-config/secrets/more_secrets/sequence @@ -0,0 +1,2 @@ +password: kmlkfxdngklfnl111111 +password: jnvkjcxnjvxnvk22222 \ No newline at end of file diff --git a/testdata/xray/jas-config/secrets/secret_generic/blacklist b/testdata/xray/jas-config/secrets/secret_generic/blacklist new file mode 100644 index 000000000..4ccd3ce0f --- /dev/null +++ b/testdata/xray/jas-config/secrets/secret_generic/blacklist @@ -0,0 +1,9 @@ +password=123456789 +password=0abcdef7 +common_token 81d7578788ec6b061029e875e01589xxxx2222e +API_KEY : DjyO8k7504YBl5lsGn2PDiV_testcABvnodnD +aws_access_key=ASIA2TEO6PMT6_test_4T4VM3J +aws_access_key=ASIA2TEO6PMT6_TEST_4T4VM3J +aws_access_key=ASIA2TEO6PMT6_sample_4T4VM3J +aws_access_key=ASIA2TEO6PMT6_sampLe_4T4VM3J +aws_access_key=ASIA2TEO6PMT6_example_4T4VM3J \ No newline at end of file diff --git a/testdata/xray/jas-config/secrets/secret_generic/gibberish b/testdata/xray/jas-config/secrets/secret_generic/gibberish new file mode 100644 index 000000000..bd7aef407 --- /dev/null +++ b/testdata/xray/jas-config/secrets/secret_generic/gibberish @@ -0,0 +1,10 @@ +secret_key=agwergartegbae123414 +client_secret 1557262894_291cd120eca3f3dcec3e77a66a2fc8c5_ +client_secret : QXfEexz9sVH-hTxsf7lq6P-LJhE +password:4fjNPloEVvrVj28BsVh3jI8t3pmv7Qg6 +client_secret=0FCEF92C008EC67540368586D10A3E09BC23D94F7D391A87CEC7650F1982C84A78175CBE3CAF98A3FC69EDD91B456CC1 +PAYMENT_TOKEN 381764678 +client_secret: nbhd-null +SECRET_KEY hgfu983ru6587yt10fjw83ft94847-3h4rgjh3yg490hg3745g4hr87gh5-276hg-9 +SECRET_KEY=12347502348 + diff --git a/testdata/xray/jas-test/requirements.txt b/testdata/xray/jas-test/requirements.txt index ddff94966..79bfd143b 100644 --- a/testdata/xray/jas-test/requirements.txt +++ 
b/testdata/xray/jas-test/requirements.txt @@ -1 +1,2 @@ -PyYAML==5.2 \ No newline at end of file +PyYAML==5.2 +Werkzeug==1.0.1 \ No newline at end of file diff --git a/testdata/xray/jas-test/sast/flask_webgoat/__init__.py b/testdata/xray/jas-test/sast/flask_webgoat/__init__.py new file mode 100644 index 000000000..9e2f505a6 --- /dev/null +++ b/testdata/xray/jas-test/sast/flask_webgoat/__init__.py @@ -0,0 +1,51 @@ +import os +import sqlite3 +from pathlib import Path + +from flask import Flask, g + +DB_FILENAME = "database.db" + + +def query_db(query, args=(), one=False, commit=False): + with sqlite3.connect(DB_FILENAME) as conn: + # vulnerability: Sensitive Data Exposure + conn.set_trace_callback(print) + cur = conn.cursor().execute(query, args) + if commit: + conn.commit() + return cur.fetchone() if one else cur.fetchall() + + +def create_app(): + app = Flask(__name__) + app.secret_key = "aeZ1iwoh2ree2mo0Eereireong4baitixaixu5Ee" + + db_path = Path(DB_FILENAME) + if db_path.exists(): + db_path.unlink() + + conn = sqlite3.connect(DB_FILENAME) + create_table_query = """CREATE TABLE IF NOT EXISTS user + (id INTEGER PRIMARY KEY, username TEXT, password TEXT, access_level INTEGER)""" + conn.execute(create_table_query) + + insert_admin_query = """INSERT INTO user (id, username, password, access_level) + VALUES (1, 'admin', 'admin', 0)""" + conn.execute(insert_admin_query) + conn.commit() + conn.close() + + with app.app_context(): + from . import actions + from . import auth + from . import status + from . import ui + from . 
import users + + app.register_blueprint(actions.bp) + app.register_blueprint(auth.bp) + app.register_blueprint(status.bp) + app.register_blueprint(ui.bp) + app.register_blueprint(users.bp) + return app diff --git a/testdata/xray/jas-test/sast/flask_webgoat/ui.py b/testdata/xray/jas-test/sast/flask_webgoat/ui.py new file mode 100644 index 000000000..2b0bd0608 --- /dev/null +++ b/testdata/xray/jas-test/sast/flask_webgoat/ui.py @@ -0,0 +1,25 @@ +import sqlite3 + +from flask import Blueprint, request, render_template +from . import query_db + +bp = Blueprint("ui", __name__) + + +@bp.route("/search") +def search(): + query_param = request.args.get("query") + if query_param is None: + message = "please provide the query parameter" + return render_template("error.html", message=message) + + try: + query = "SELECT username, access_level FROM user WHERE username LIKE ?;" + results = query_db(query, (query_param,)) + # vulnerability: XSS + return render_template( + "search.html", results=results, num_results=len(results), query=query_param + ) + except sqlite3.Error as err: + message = "Error while executing query " + query_param + ": " + err + return render_template("error.html", message=message) diff --git a/testdata/xray/jas-test/sast/result.sarif b/testdata/xray/jas-test/sast/result.sarif new file mode 100644 index 000000000..839f34816 --- /dev/null +++ b/testdata/xray/jas-test/sast/result.sarif @@ -0,0 +1,618 @@ +{ + "runs": [ + { + "tool": { + "driver": { + "name": "USAF", + "rules": [ + { + "id": "python-flask-debug", + "defaultConfiguration": { + "parameters": { + "properties": { + "CWE": "1295" + } + } + }, + "fullDescription": { + "text": "\n### Overview\nDebug mode in a Flask app is a feature that allows the developer to see detailed\nerror messages and tracebacks when an error occurs. This can be useful for debugging\nand troubleshooting, but it can also create a security vulnerability if the app is\ndeployed in debug mode. 
In debug mode, Flask will display detailed error messages and\ntracebacks to the user, even if the error is caused by malicious input.\nThis can provide attackers with valuable information about the app's internal workings\nand vulnerabilities, making it easier for them to exploit those vulnerabilities.\n\n### Query operation\nIn this query we look Flask applications that set the `debug` argument to `True`\n\n### Vulnerable example\n```python\nfrom flask import Flask\n\napp = Flask(__name__)\n\n@app.route('/')\ndef hello():\n return 'Hello, World!'\n\nif __name__ == '__main__':\n app.run(debug=True)\n```\nIn this example, the Flask application is set to run in debug mode by passing\n`debug=True` as an argument to the `app.run()` function. This will make the application\nemit potentially sensitive information to the users.\n\n### Remediation\nWhen using `app.run`, omit the `debug` flag or set it to `False` -\n```diff\nif __name__ == '__main__':\n- app.run(debug=True)\n+ app.run()\n```\n", + "markdown": "\n### Overview\nDebug mode in a Flask app is a feature that allows the developer to see detailed\nerror messages and tracebacks when an error occurs. This can be useful for debugging\nand troubleshooting, but it can also create a security vulnerability if the app is\ndeployed in debug mode. 
In debug mode, Flask will display detailed error messages and\ntracebacks to the user, even if the error is caused by malicious input.\nThis can provide attackers with valuable information about the app's internal workings\nand vulnerabilities, making it easier for them to exploit those vulnerabilities.\n\n### Query operation\nIn this query we look Flask applications that set the `debug` argument to `True`\n\n### Vulnerable example\n```python\nfrom flask import Flask\n\napp = Flask(__name__)\n\n@app.route('/')\ndef hello():\n return 'Hello, World!'\n\nif __name__ == '__main__':\n app.run(debug=True)\n```\nIn this example, the Flask application is set to run in debug mode by passing\n`debug=True` as an argument to the `app.run()` function. This will make the application\nemit potentially sensitive information to the users.\n\n### Remediation\nWhen using `app.run`, omit the `debug` flag or set it to `False` -\n```diff\nif __name__ == '__main__':\n- app.run(debug=True)\n+ app.run()\n```\n" + }, + "shortDescription": { + "text": "Flask Running in Debug" + } + }, + { + "id": "python-stack-trace-exposure", + "defaultConfiguration": { + "parameters": { + "properties": { + "CWE": "209" + } + } + }, + "fullDescription": { + "text": "\n### Overview\nStack trace exposure is a type of security vulnerability that occurs when a program reveals\nsensitive information, such as the names and locations of internal files and variables,\nin error messages or other diagnostic output. This can happen when a program crashes or\nencounters an error, and the stack trace (a record of the program's call stack at the time\nof the error) is included in the output. 
Stack trace exposure can provide attackers with\nvaluable information about a program's internal workings and vulnerabilities, making it\neasier for them to exploit those vulnerabilities and gain unauthorized access\nto the system.\n\n### Query operation\nIn this query we look for any stack trace information flowing into the output.\n\n### Vulnerable example\n```python\nimport traceback\n\ndef my_function():\n try:\n # Some code that may raise an exception\n raise ValueError('Something went wrong')\n except ValueError as e:\n traceback.print_tb(e.__traceback__)\n\nmy_function()\n```\nIn this example, the `my_function()` function intentionally raises\na `ValueError` exception.\nThe `traceback.print_tb()` function is then used to print the stack trace\nwhen the exception is caught. The vulnerability lies in using `traceback.print_tb()`\nto output the stack trace directly to the console or any other output stream.\nIf this code were part of a web application or exposed through an API,\nthe stack trace would be exposed in the server logs or potentially returned\nas part of an error response to the client.\n\n### Remediation\nLog the exception to a logging framework or file, instead of outputting directly to the\nconsole-\n\n```python\ndef log_exception(exception):\n logging.exception('An exception occurred', exc_info=exception)\n```\n\n```diff\ndef my_function():\n try:\n # Some code that may raise an exception\n raise ValueError('Something went wrong')\n except ValueError as e:\n- traceback.print_tb(e.__traceback__)\n+ log_exception(e)\n```\n", + "markdown": "\n### Overview\nStack trace exposure is a type of security vulnerability that occurs when a program reveals\nsensitive information, such as the names and locations of internal files and variables,\nin error messages or other diagnostic output. 
This can happen when a program crashes or\nencounters an error, and the stack trace (a record of the program's call stack at the time\nof the error) is included in the output. Stack trace exposure can provide attackers with\nvaluable information about a program's internal workings and vulnerabilities, making it\neasier for them to exploit those vulnerabilities and gain unauthorized access\nto the system.\n\n### Query operation\nIn this query we look for any stack trace information flowing into the output.\n\n### Vulnerable example\n```python\nimport traceback\n\ndef my_function():\n try:\n # Some code that may raise an exception\n raise ValueError('Something went wrong')\n except ValueError as e:\n traceback.print_tb(e.__traceback__)\n\nmy_function()\n```\nIn this example, the `my_function()` function intentionally raises\na `ValueError` exception.\nThe `traceback.print_tb()` function is then used to print the stack trace\nwhen the exception is caught. The vulnerability lies in using `traceback.print_tb()`\nto output the stack trace directly to the console or any other output stream.\nIf this code were part of a web application or exposed through an API,\nthe stack trace would be exposed in the server logs or potentially returned\nas part of an error response to the client.\n\n### Remediation\nLog the exception to a logging framework or file, instead of outputting directly to the\nconsole-\n\n```python\ndef log_exception(exception):\n logging.exception('An exception occurred', exc_info=exception)\n```\n\n```diff\ndef my_function():\n try:\n # Some code that may raise an exception\n raise ValueError('Something went wrong')\n except ValueError as e:\n- traceback.print_tb(e.__traceback__)\n+ log_exception(e)\n```\n" + }, + "shortDescription": { + "text": "Stack Trace Exposure" + } + }, + { + "id": "python-xss", + "defaultConfiguration": { + "parameters": { + "properties": { + "CWE": "79" + } + } + }, + "fullDescription": { + "text": "\n### Overview\nXSS, or Cross-Site 
Scripting, is a type of vulnerability that allows an attacker to\ninject malicious code into a website or web application.\nThis can allow the attacker to steal sensitive information from users, such as their\ncookies or login credentials, or to perform unauthorized actions on their behalf.\n\n### Query operation\nIn the query we look for any user input that flows into\na potential output of the application.\n\n### Vulnerable example\nIn the following example, the Flask application takes a user-supplied parameter (`name`)\nfrom the query string and renders it directly into an HTML template using the\n`render_template_string` function. The issue is that\nthe user input is not properly sanitized or escaped, making it vulnerable to XSS attacks.\n```python\nfrom flask import Flask, request, render_template_string\n\napp = Flask(__name__)\n\n@app.route('/')\ndef index():\n name = request.args.get('name', 'Guest')\n message = f'Hello, {name}!'\n return render_template_string('

{}

'.format(message))\n\nif __name__ == '__main__':\napp.run()\n```\nAn attacker can exploit this vulnerability by injecting malicious JavaScript code into the\n`name` parameter. For instance, they could modify the URL to include the following payload:\n`http://localhost:5000/?name=`\n\n### Remediation\nWhen rendering templates, use parametrized variable assignments (which are automatically\nescaped) instead of direct string manipulation -\n```diff\n@app.route('/')\ndef index():\n name = request.args.get('name', 'Guest')\n message = f'Hello, {name}!'\n- return render_template_string('

{}

'.format(message))\n+ return render_template_string('

{{ message }}

', message=message)\n```\n", + "markdown": "\n### Overview\nXSS, or Cross-Site Scripting, is a type of vulnerability that allows an attacker to\ninject malicious code into a website or web application.\nThis can allow the attacker to steal sensitive information from users, such as their\ncookies or login credentials, or to perform unauthorized actions on their behalf.\n\n### Query operation\nIn the query we look for any user input that flows into\na potential output of the application.\n\n### Vulnerable example\nIn the following example, the Flask application takes a user-supplied parameter (`name`)\nfrom the query string and renders it directly into an HTML template using the\n`render_template_string` function. The issue is that\nthe user input is not properly sanitized or escaped, making it vulnerable to XSS attacks.\n```python\nfrom flask import Flask, request, render_template_string\n\napp = Flask(__name__)\n\n@app.route('/')\ndef index():\n name = request.args.get('name', 'Guest')\n message = f'Hello, {name}!'\n return render_template_string('

{}

'.format(message))\n\nif __name__ == '__main__':\napp.run()\n```\nAn attacker can exploit this vulnerability by injecting malicious JavaScript code into the\n`name` parameter. For instance, they could modify the URL to include the following payload:\n`http://localhost:5000/?name=`\n\n### Remediation\nWhen rendering templates, use parametrized variable assignments (which are automatically\nescaped) instead of direct string manipulation -\n```diff\n@app.route('/')\ndef index():\n name = request.args.get('name', 'Guest')\n message = f'Hello, {name}!'\n- return render_template_string('

{}

'.format(message))\n+ return render_template_string('

{{ message }}

', message=message)\n```\n" + }, + "shortDescription": { + "text": "XSS Vulnerability" + } + } + ] + } + }, + "invocations": [ + { + "executionSuccessful": true, + "arguments": [ + "/Users/assafa/.jfrog/dependencies/analyzerManager/zd_scanner/scanner", + "scan", + "/var/folders/xv/th4cksxn7jv9wjrdnn1h4tj00000gq/T/jfrog.cli.temp.-1693492973-1963413933/results.sarif" + ], + "workingDirectory": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast" + } + } + ], + "results": [ + { + "message": { + "text": "Stack Trace Exposure" + }, + "level": "note", + "locations": [ + { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.__init__.query_db" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/__init__.py" + }, + "region": { + "endColumn": 39, + "endLine": 13, + "snippet": { + "text": "conn.set_trace_callback(print)" + }, + "startColumn": 9, + "startLine": 13 + } + } + } + ], + "ruleId": "python-stack-trace-exposure" + }, + { + "message": { + "text": "Stack Trace Exposure" + }, + "level": "note", + "locations": [ + { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.__init__.query_db" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/__init__.py" + }, + "region": { + "endColumn": 39, + "endLine": 13, + "snippet": { + "text": "conn.set_trace_callback(print)" + }, + "startColumn": 9, + "startLine": 13 + } + } + } + ], + "ruleId": "python-stack-trace-exposure" + }, + { + "message": { + "text": "XSS Vulnerability" + }, + "codeFlows": [ + { + "threadFlows": [ + { + "locations": [ + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": 
"file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 31, + "endLine": 11, + "snippet": { + "text": "request.args" + }, + "startColumn": 19, + "startLine": 11 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 35, + "endLine": 11, + "snippet": { + "text": "request.args.get" + }, + "startColumn": 19, + "startLine": 11 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 44, + "endLine": 11, + "snippet": { + "text": "request.args.get(\"query\")" + }, + "startColumn": 19, + "startLine": 11 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 16, + "endLine": 11, + "snippet": { + "text": "query_param" + }, + "startColumn": 5, + "startLine": 11 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 10, + "endLine": 22, + "snippet": { + "text": "render_template(\n \"search.html\", results=results, num_results=len(results), query=query_param\n )" 
+ }, + "startColumn": 16, + "startLine": 20 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 10, + "endLine": 22, + "snippet": { + "text": "return render_template(\n \"search.html\", results=results, num_results=len(results), query=query_param\n )" + }, + "startColumn": 9, + "startLine": 20 + } + } + } + } + ] + } + ] + } + ], + "level": "error", + "locations": [ + { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 10, + "endLine": 22, + "snippet": { + "text": "return render_template(\n \"search.html\", results=results, num_results=len(results), query=query_param\n )" + }, + "startColumn": 9, + "startLine": 20 + } + } + } + ], + "ruleId": "python-xss" + }, + { + "message": { + "text": "XSS Vulnerability" + }, + "codeFlows": [ + { + "threadFlows": [ + { + "locations": [ + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 31, + "endLine": 11, + "snippet": { + "text": "request.args" + }, + "startColumn": 19, + "startLine": 11 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + 
"endColumn": 35, + "endLine": 11, + "snippet": { + "text": "request.args.get" + }, + "startColumn": 19, + "startLine": 11 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 44, + "endLine": 11, + "snippet": { + "text": "request.args.get(\"query\")" + }, + "startColumn": 19, + "startLine": 11 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 16, + "endLine": 11, + "snippet": { + "text": "query_param" + }, + "startColumn": 5, + "startLine": 11 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 63, + "endLine": 24, + "snippet": { + "text": "\"Error while executing query \" + query_param" + }, + "startColumn": 19, + "startLine": 24 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 70, + "endLine": 24, + "snippet": { + "text": "\"Error while executing query \" + query_param + \": \"" + }, + "startColumn": 19, + "startLine": 24 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": 
"flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 76, + "endLine": 24, + "snippet": { + "text": "\"Error while executing query \" + query_param + \": \" + err" + }, + "startColumn": 19, + "startLine": 24 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 16, + "endLine": 24, + "snippet": { + "text": "message" + }, + "startColumn": 9, + "startLine": 24 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 62, + "endLine": 25, + "snippet": { + "text": "render_template(\"error.html\", message=message)" + }, + "startColumn": 16, + "startLine": 25 + } + } + } + }, + { + "location": { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 62, + "endLine": 25, + "snippet": { + "text": "return render_template(\"error.html\", message=message)" + }, + "startColumn": 9, + "startLine": 25 + } + } + } + } + ] + } + ] + } + ], + "level": "error", + "locations": [ + { + "logicalLocations": [ + { + "fullyQualifiedName": "flask_webgoat.ui.search" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": 
"file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/flask_webgoat/ui.py" + }, + "region": { + "endColumn": 62, + "endLine": 25, + "snippet": { + "text": "return render_template(\"error.html\", message=message)" + }, + "startColumn": 9, + "startLine": 25 + } + } + } + ], + "ruleId": "python-xss" + }, + { + "message": { + "text": "Flask Running in Debug" + }, + "locations": [ + { + "logicalLocations": [ + { + "fullyQualifiedName": "run" + } + ], + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/assafa/Documents/code/cli-projects/jfrog-cli/testdata/xray/jas/sast/run.py" + }, + "region": { + "endColumn": 24, + "endLine": 15, + "snippet": { + "text": "app.run(debug=True)" + }, + "startColumn": 5, + "startLine": 15 + } + } + } + ], + "ruleId": "python-flask-debug" + } + ] + } + ], + "version": "2.1.0", + "$schema": "https://docs.oasis-open.org/sarif/sarif/v2.1.0/cos02/schemas/sarif-schema-2.1.0.json" +} \ No newline at end of file diff --git a/testdata/xray/jas-test/sast/run.py b/testdata/xray/jas-test/sast/run.py new file mode 100644 index 000000000..8cacc71d4 --- /dev/null +++ b/testdata/xray/jas-test/sast/run.py @@ -0,0 +1,15 @@ +from flask_webgoat import create_app + +app = create_app() + +@app.after_request +def add_csp_headers(response): + # vulnerability: Broken Access Control + response.headers['Access-Control-Allow-Origin'] = '*' + # vulnerability: Security Misconfiguration + response.headers['Content-Security-Policy'] = "script-src 'self' 'unsafe-inline'" + return response + +if __name__ == '__main__': + # vulnerability: Security Misconfiguration + app.run(debug=True) diff --git a/transfer_test.go b/transfer_test.go index 40c735552..c748afadf 100644 --- a/transfer_test.go +++ b/transfer_test.go @@ -493,12 +493,12 @@ func updateDockerRepoParams(t *testing.T, targetServicesManager artifactory.Arti params.AllowAnyHostAuth = inverseBooleanPointer(params.AllowAnyHostAuth) params.EnableCookieManagement = 
inverseBooleanPointer(params.EnableCookieManagement) params.BypassHeadRequests = inverseBooleanPointer(params.BypassHeadRequests) - params.SocketTimeoutMillis += 100 - params.RetrievalCachePeriodSecs += 100 - params.MetadataRetrievalTimeoutSecs += 100 - params.MissedRetrievalCachePeriodSecs += 100 - params.UnusedArtifactsCleanupPeriodHours += 100 - params.AssumedOfflinePeriodSecs += 100 + *params.SocketTimeoutMillis += 100 + *params.RetrievalCachePeriodSecs += 100 + *params.MetadataRetrievalTimeoutSecs += 100 + *params.MissedRetrievalCachePeriodSecs += 100 + *params.UnusedArtifactsCleanupPeriodHours += 100 + *params.AssumedOfflinePeriodSecs += 100 params.Username = "test123" params.ContentSynchronisation.Enabled = inverseBooleanPointer(params.ContentSynchronisation.Enabled) @@ -546,17 +546,16 @@ func createTestProject(t *testing.T) func() error { deleteProjectIfExists(t, accessManager, tests.ProjectKey) // Create new project - falseValue := false adminPrivileges := accessServices.AdminPrivileges{ - ManageMembers: &falseValue, - ManageResources: &falseValue, - IndexResources: &falseValue, + ManageMembers: utils.Pointer(false), + ManageResources: utils.Pointer(false), + IndexResources: utils.Pointer(false), } projectDetails := accessServices.Project{ DisplayName: tests.ProjectKey + "MyProject", Description: "My Test Project", AdminPrivileges: &adminPrivileges, - SoftLimit: &falseValue, + SoftLimit: utils.Pointer(false), StorageQuotaBytes: 1073741825, ProjectKey: tests.ProjectKey, } @@ -570,10 +569,9 @@ func createTestProject(t *testing.T) func() error { } func updateProjectParams(t *testing.T, projectParams *accessServices.Project, targetAccessManager *access.AccessServicesManager) { - trueValue := true projectParams.Description = "123123123123" - projectParams.AdminPrivileges.IndexResources = &trueValue - projectParams.SoftLimit = &trueValue + projectParams.AdminPrivileges.IndexResources = utils.Pointer(true) + projectParams.SoftLimit = utils.Pointer(true) 
projectParams.StorageQuotaBytes += 1 assert.NoError(t, targetAccessManager.UpdateProject(accessServices.ProjectParams{ProjectDetails: *projectParams})) } diff --git a/utils/cliutils/cli_consts.go b/utils/cliutils/cli_consts.go index 4587ff607..2b88e08a5 100644 --- a/utils/cliutils/cli_consts.go +++ b/utils/cliutils/cli_consts.go @@ -4,7 +4,7 @@ import "time" const ( // General CLI constants - CliVersion = "2.47.0" + CliVersion = "2.48.0" ClientAgent = "jfrog-cli-go" // CLI base commands constants: diff --git a/xray_test.go b/xray_test.go index 8137e826f..e4df98816 100644 --- a/xray_test.go +++ b/xray_test.go @@ -416,7 +416,7 @@ func TestXrayAuditMultiProjects(t *testing.T) { defer cleanTestsHomeEnv() output := xrayCli.WithoutCredentials().RunCliCmdWithOutput(t, "audit", "--format="+string(utils.SimpleJson), workingDirsFlag) verifySimpleJsonScanResults(t, output, 35, 0) - verifySimpleJsonJasResults(t, output, 9, 7, 0, 1) + verifySimpleJsonJasResults(t, output, 3, 9, 7, 3) } func TestXrayAuditPipJson(t *testing.T) { @@ -695,7 +695,6 @@ func runDockerScan(t *testing.T, imageName, watchName string, minViolations, min } func createTestWatch(t *testing.T) (string, func()) { - trueValue := true xrayManager, err := utils.CreateXrayServiceManager(xrayDetails) assert.NoError(t, err) // Create new default policy. 
@@ -707,7 +706,7 @@ func createTestWatch(t *testing.T) (string, func()) { Criteria: *xrayUtils.CreateSeverityPolicyCriteria(xrayUtils.Low), Priority: 1, Actions: &xrayUtils.PolicyAction{ - FailBuild: &trueValue, + FailBuild: clientUtils.Pointer(true), }, }}, } @@ -750,13 +749,18 @@ func TestXrayOfflineDBSyncV3(t *testing.T) { func TestXrayAuditJasSimpleJson(t *testing.T) { output := testXrayAuditJas(t, string(utils.SimpleJson), "jas-test") - verifySimpleJsonJasResults(t, output, 9, 7, 2, 1) + verifySimpleJsonJasResults(t, output, 3, 9, 7, 2) +} + +func TestXrayAuditJasSimpleJsonWithConfig(t *testing.T) { + output := testXrayAuditJas(t, string(utils.SimpleJson), "jas-config") + verifySimpleJsonJasResults(t, output, 0, 0, 1, 2) } func TestXrayAuditJasNoViolationsSimpleJson(t *testing.T) { output := testXrayAuditJas(t, string(utils.SimpleJson), "npm") verifySimpleJsonScanResults(t, output, 2, 0) - verifySimpleJsonJasResults(t, output, 0, 0, 0, 1) + verifySimpleJsonJasResults(t, output, 0, 0, 0, 0) } func testXrayAuditJas(t *testing.T, format string, project string) string { @@ -776,10 +780,11 @@ func testXrayAuditJas(t *testing.T, format string, project string) string { return xrayCli.WithoutCredentials().RunCliCmdWithOutput(t, "audit", "--format="+format) } -func verifySimpleJsonJasResults(t *testing.T, content string, minIacViolations, minSecrets, minApplicable, minNotApplicable int) { +func verifySimpleJsonJasResults(t *testing.T, content string, minSastViolations, minIacViolations, minSecrets, minApplicable int) { var results formats.SimpleJsonResults err := json.Unmarshal([]byte(content), &results) if assert.NoError(t, err) { + assert.GreaterOrEqual(t, len(results.Sast), minSastViolations, "Found less sast then expected") assert.GreaterOrEqual(t, len(results.Secrets), minSecrets, "Found less secrets then expected") assert.GreaterOrEqual(t, len(results.Iacs), minIacViolations, "Found less IaC then expected") var applicableResults, notApplicableResults int @@ 
-791,7 +796,7 @@ func verifySimpleJsonJasResults(t *testing.T, content string, minIacViolations, } } assert.GreaterOrEqual(t, applicableResults, minApplicable, "Found less applicableResults then expected") - assert.GreaterOrEqual(t, notApplicableResults, minNotApplicable, "Found less notApplicableResults then expected") + assert.GreaterOrEqual(t, notApplicableResults, 1, "Found less notApplicableResults then expected") } }