From 6927894a2106f36eb695d1a412d99816e928ee3e Mon Sep 17 00:00:00 2001 From: py Date: Thu, 28 Nov 2024 20:26:35 +0530 Subject: [PATCH] wip: packaging code --- .github/dependabot.yml | 12 + .github/pull-request-template.md | 9 + .github/workflows/cargo-build.yaml | 44 ++ .github/workflows/cargo-test.yaml | 89 +++ .github/workflows/check-format.yaml | 36 ++ .github/workflows/nix-check.yaml | 51 ++ .github/workflows/ship.yaml | 249 +++++++ changelog.md | 24 + ci/deploy.sh | 139 ++++ crates/configuration/src/version1.rs | 4 +- crates/query-engine/execution/src/query.rs | 20 +- docker-compose.yaml | 46 ++ flake.lock | 97 +++ flake.nix | 148 +++++ justfile | 233 +++++++ metrics/grafana/dashboard.yaml | 12 + metrics/grafana/dashboards/postgres-ndc.json | 643 +++++++++++++++++++ metrics/grafana/datasource.yml | 9 + metrics/prometheus/prometheus.yml | 30 + nix/app.nix | 56 ++ nix/docker.nix | 47 ++ nix/rust.nix | 59 ++ rust-toolchain.toml | 4 + scripts/generate-chinook-configuration.sh | 52 ++ scripts/release-notes.py | 64 ++ scripts/wait-until | 39 ++ static/configuration.json | 366 +++++++++++ static/temp-deploys/.gitkeep | 0 28 files changed, 2567 insertions(+), 15 deletions(-) create mode 100644 .github/dependabot.yml create mode 100644 .github/pull-request-template.md create mode 100644 .github/workflows/cargo-build.yaml create mode 100644 .github/workflows/cargo-test.yaml create mode 100644 .github/workflows/check-format.yaml create mode 100644 .github/workflows/nix-check.yaml create mode 100644 .github/workflows/ship.yaml create mode 100644 changelog.md create mode 100644 ci/deploy.sh create mode 100644 docker-compose.yaml create mode 100644 flake.lock create mode 100644 flake.nix create mode 100644 justfile create mode 100644 metrics/grafana/dashboard.yaml create mode 100644 metrics/grafana/dashboards/postgres-ndc.json create mode 100644 metrics/grafana/datasource.yml create mode 100644 metrics/prometheus/prometheus.yml create mode 100644 nix/app.nix create mode 100644 nix/docker.nix create mode 100644 nix/rust.nix create mode 100644 rust-toolchain.toml create mode 100755 scripts/generate-chinook-configuration.sh create mode 100755 scripts/release-notes.py create mode 100755 scripts/wait-until create mode 100644 static/configuration.json create mode 100644 static/temp-deploys/.gitkeep diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..973ac41 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,12 @@ +version: 2 + +updates: + - package-ecosystem: "cargo" + directory: "/" + schedule: + interval: "weekly" + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/pull-request-template.md b/.github/pull-request-template.md new file mode 100644 index 0000000..6bc927e --- /dev/null +++ b/.github/pull-request-template.md @@ -0,0 +1,9 @@ + + +### What + + + +### How + + diff --git a/.github/workflows/cargo-build.yaml b/.github/workflows/cargo-build.yaml new file mode 100644 index 0000000..2d94855 --- /dev/null +++ b/.github/workflows/cargo-build.yaml @@ -0,0 +1,44 @@ +on: [push] + +name: DynamoDB NDC build + +jobs: + build: + name: Build DynamoDB NDC + runs-on: ubuntu-latest + env: + CARGO_NET_GIT_FETCH_WITH_CLI: "true" + RUSTFLAGS: "-D warnings" # fail on warnings + steps: + - uses: actions/checkout@v4 + + - name: install protoc + uses: arduino/setup-protoc@v2 + with: + version: "23.3" + + - name: install tools + run: | + rustup show + + - uses: Swatinem/rust-cache@v2 + + - name: build crates + 
run: | + cargo build --locked --release --all-features + + - name: lint + run: | + cargo clippy --release -- --deny=clippy::all + + # scream into Slack if something goes wrong + - name: report status + if: github.ref == 'refs/heads/main' + uses: ravsamhq/notify-slack-action@v2 + with: + status: ${{ job.status }} + notify_when: failure + notification_title: "😧 Error on <{repo_url}|{repo}>" + message_format: "🐴 *{workflow}* {status_message} for <{repo_url}|{repo}>" + env: + SLACK_WEBHOOK_URL: ${{ secrets.BROKEN_BUILD_SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/cargo-test.yaml b/.github/workflows/cargo-test.yaml new file mode 100644 index 0000000..6940dea --- /dev/null +++ b/.github/workflows/cargo-test.yaml @@ -0,0 +1,89 @@ +# on: [push] + +# name: BigQuery NDC tests + +# jobs: +# test-query-engine: +# name: Test query-engine +# runs-on: ubuntu-latest +# env: +# CARGO_NET_GIT_FETCH_WITH_CLI: "true" +# RUSTFLAGS: "-D warnings" # fail on warnings +# steps: +# - uses: actions/checkout@v4 + +# - name: install tools +# run: | +# rustup show + +# - uses: Swatinem/rust-cache@v2 + +# - name: run tests +# run: | +# cargo test --release -p query-engine-translation +# env: +# RUST_LOG: INFO + +# # scream into Slack if something goes wrong +# - name: report status +# if: github.ref == 'refs/heads/main' +# uses: ravsamhq/notify-slack-action@v2 +# with: +# status: ${{ job.status }} +# notify_when: failure +# notification_title: "😧 Error on <{repo_url}|{repo}>" +# message_format: "🐴 *{workflow}* {status_message} for <{repo_url}|{repo}>" +# env: +# SLACK_WEBHOOK_URL: ${{ secrets.BROKEN_BUILD_SLACK_WEBHOOK_URL }} + +# test-bigquery-ndc: +# name: Test ndc-bigquery +# runs-on: ubuntu-latest +# env: +# CARGO_NET_GIT_FETCH_WITH_CLI: "true" +# RUSTFLAGS: "-D warnings" # fail on warnings + +# steps: +# - uses: actions/checkout@v4 + +# - name: install protoc +# uses: arduino/setup-protoc@v2 +# with: +# version: "23.3" + +# - name: install tools +# run: | +# rustup show + +# - uses: Swatinem/rust-cache@v2 +# with: +# shared-key: "build" # share the cache across jobs + +# # - uses: hoverkraft-tech/compose-action@v1.5.1 +# # with: +# # compose-file: "./docker-compose.yaml" +# # up-flags: "--pull=always --wait" +# # down-flags: "--volumes" +# # services: | +# # bigquery + +# - name: run tests +# run: | +# cargo test --release --workspace +# env: +# HASURA_BIGQUERY_SERVICE_KEY: "{\"type\": \"service_account\",\"project_id\": \"hasura-development\",\"private_key_id\": \"222dd3f9e98b6743bb8d74d7a126fe89e6ac221d\",\"private_key\": \"-----BEGIN PRIVATE 
KEY-----\\n[REDACTED: private-key material removed; a service-account key must never be committed -- load it from a GitHub Actions secret instead]\\n-----END PRIVATE KEY-----\\n\",\"client_email\": \"skm-bq-test@hasura-development.iam.gserviceaccount.com\",\"client_id\": \"116460406056940511807\",\"auth_uri\": \"https://accounts.google.com/o/oauth2/auth\",\"token_uri\": \"https://oauth2.googleapis.com/token\",\"auth_provider_x509_cert_url\": \"https://www.googleapis.com/oauth2/v1/certs\",\"client_x509_cert_url\": \"https://www.googleapis.com/robot/v1/metadata/x509/skm-bq-test%40hasura-development.iam.gserviceaccount.com\",\"universe_domain\": \"googleapis.com\"}"
+#         HASURA_BIGQUERY_PROJECT_ID: "hasura-development"
+#         HASURA_BIGQUERY_DATASET_ID: "chinook_sample"
+#         RUST_LOG: INFO
+
+#     # scream into Slack if something goes wrong
+#     - name: Report Status
+#       if: github.ref == 'refs/heads/main'
+#       uses: ravsamhq/notify-slack-action@v2
+#       with:
+#         status: ${{ job.status }}
+#         notify_when: failure
+#         notification_title: "😧 Error on <{repo_url}|{repo}>"
+#         message_format: "🐴 *{workflow}* {status_message} for <{repo_url}|{repo}>"
+#       env:
+#         SLACK_WEBHOOK_URL: ${{ secrets.BROKEN_BUILD_SLACK_WEBHOOK_URL }}
diff --git a/.github/workflows/check-format.yaml b/.github/workflows/check-format.yaml
new file mode 100644
index 0000000..539c296
--- /dev/null
+++ b/.github/workflows/check-format.yaml
+name: DynamoDB NDC format
+
+on:
+  push:
+
+jobs:
+  cargo-fmt:
+    name: check formatting with cargo fmt
+    runs-on: ubuntu-latest
+    env:
+      CARGO_NET_GIT_FETCH_WITH_CLI: "true"
+      RUSTFLAGS: "-D warnings" # fail on warnings
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: install tools
+        run: |
+          rustup show
+
+      - uses: Swatinem/rust-cache@v2
+
+      - name: check formatting
+        run: |
+          cargo fmt --all --check
+
+      # scream into Slack if something goes wrong
+      - name: Report Status
+        if: github.ref == 'refs/heads/main'
+        uses: ravsamhq/notify-slack-action@v2
+        with:
+
status: ${{ job.status }} + notify_when: failure + notification_title: "😧 Error on <{repo_url}|{repo}>" + message_format: "🐴 *{workflow}* {status_message} for <{repo_url}|{repo}>" + env: + SLACK_WEBHOOK_URL: ${{ secrets.BROKEN_BUILD_SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/nix-check.yaml b/.github/workflows/nix-check.yaml new file mode 100644 index 0000000..848ed51 --- /dev/null +++ b/.github/workflows/nix-check.yaml @@ -0,0 +1,51 @@ +name: DynamoDB NDC nix check + +on: + push: + +jobs: + nix-flake-check: + name: nix flake check + runs-on: ubuntu-latest + steps: + - name: Checkout 🛎️ + uses: actions/checkout@v4 + + - name: Install Nix ❄ + uses: cachix/install-nix-action@V28 + with: + github_access_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up the Nix Cache 🔌 + uses: cachix/cachix-action@v15 + with: + name: hasura-v3-dev + authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} + useDaemon: false # attempt to stop hanging on cleanup + + - name: nix flake check + run: | + nix flake check --print-build-logs + + nix-develop: + name: nix develop + runs-on: ubuntu-latest + steps: + - name: Checkout 🛎️ + uses: actions/checkout@v4 + + - name: Install Nix ❄ + uses: cachix/install-nix-action@V28 + with: + github_access_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up the Nix Cache 🔌 + uses: cachix/cachix-action@v15 + with: + name: hasura-v3-dev + authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} + useDaemon: false # attempt to stop hanging on cleanup + + - name: nix develop --command true + run: | + nix develop --print-build-logs --command true diff --git a/.github/workflows/ship.yaml b/.github/workflows/ship.yaml new file mode 100644 index 0000000..f72856f --- /dev/null +++ b/.github/workflows/ship.yaml @@ -0,0 +1,249 @@ +name: ship + +on: + push: + +jobs: + nix-build: + name: nix build + runs-on: ubuntu-latest + strategy: + matrix: + target: + - x86_64-linux + - aarch64-linux + steps: + - name: Checkout 🛎️ + uses: actions/checkout@v4 + + - name: Install Nix ❄ + uses: cachix/install-nix-action@V28 + with: + github_access_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up the Nix Cache 🔌 + uses: cachix/cachix-action@v15 + with: + name: hasura-v3-dev + authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} + useDaemon: false # attempt to stop hanging on cleanup + + - name: Build a binary with Nix + run: nix build --print-build-logs '.#${{ matrix.target }}' + + - name: Build a Docker image with Nix + run: nix build --print-build-logs '.#docker-${{ matrix.target }}' + + # scream into Slack if something goes wrong + - name: Report Status + if: always() && github.ref == 'refs/heads/main' + uses: ravsamhq/notify-slack-action@v2 + with: + status: ${{ job.status }} + notify_when: failure + notification_title: "😧 Error on <{repo_url}|{repo}>" + message_format: "🐴 *{workflow}* {status_message} for <{repo_url}|{repo}>" + env: + SLACK_WEBHOOK_URL: ${{ secrets.BROKEN_BUILD_SLACK_WEBHOOK_URL }} + + push-docker-images: + name: push Docker images + needs: + - nix-build + runs-on: ubuntu-latest + # Only run on the `main` branch or version tags. + # Note we currently tag the image with 'latest', so will want to stop doing + # so if we run this on PR branches, etc. 
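+    # For example, a push to "refs/heads/main" or a release tag push like
+    # "refs/tags/v0.1.0" runs this job; a push to "refs/heads/some-feature"
+    # does not.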
+    if: (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v'))
+    permissions:
+      contents: read
+      id-token: write
+      packages: write
+    steps:
+      - name: Checkout 🛎️
+        uses: actions/checkout@v4
+
+      - name: Install Nix ❄
+        uses: cachix/install-nix-action@V28
+        with:
+          github_access_token: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Set up the Nix Cache 🔌
+        uses: cachix/cachix-action@v15
+        with:
+          name: hasura-v3-dev
+          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
+          useDaemon: false # attempt to stop hanging on cleanup
+
+      - id: gcloud-auth
+        name: Authenticate to Google Cloud 🔑
+        uses: google-github-actions/auth@v2
+        with:
+          token_format: access_token
+          service_account: "hasura-ci-docker-writer@hasura-ddn.iam.gserviceaccount.com"
+          workload_identity_provider: "projects/1025009031284/locations/global/workloadIdentityPools/hasura-ddn/providers/github"
+
+      - name: Login to Google Container Registry 📦
+        uses: "docker/login-action@v3"
+        with:
+          registry: "us-docker.pkg.dev"
+          username: "oauth2accesstoken"
+          password: "${{ steps.gcloud-auth.outputs.access_token }}"
+
+      - name: Login to GitHub Container Registry 📦
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Push Docker images to Google Container Registry 🚢
+        run: nix run .#publish-docker-image '${{ github.ref }}' 'us-docker.pkg.dev/hasura-ddn/ddn/ndc-dynamodb'
+
+      - name: Push Docker images to GitHub Packages 🚢
+        run: nix run .#publish-docker-image '${{ github.ref }}' 'ghcr.io/hasura/ndc-dynamodb'
+
+      # scream into Slack if something goes wrong
+      - name: Report Status
+        if: always()
+        uses: ravsamhq/notify-slack-action@v2
+        with:
+          status: ${{ job.status }}
+          notify_when: failure
+          notification_title: "😧 Error on <{repo_url}|{repo}>"
+          message_format: "🐴 *{workflow}* {status_message} for <{repo_url}|{repo}>"
+        env:
+          SLACK_WEBHOOK_URL: ${{ secrets.BROKEN_BUILD_SLACK_WEBHOOK_URL }}
+
+  build-cli-binaries:
+    name: build the CLI binaries
+    strategy:
+      matrix:
+        include:
+          - runner: ubuntu-20.04
+            target: x86_64-unknown-linux-gnu
+          - runner: ubuntu-20.04
+            target: aarch64-unknown-linux-gnu
+            linux-packages: gcc-aarch64-linux-gnu
+            linker: /usr/bin/aarch64-linux-gnu-gcc
+          - runner: macos-latest
+            target: x86_64-apple-darwin
+          - runner: macos-latest
+            target: aarch64-apple-darwin
+          - runner: windows-latest
+            target: x86_64-pc-windows-msvc
+            extension: .exe
+            extra-rust-flags: "-C target-feature=+crt-static"
+    runs-on: ${{ matrix.runner }}
+    env:
+      CARGO_BUILD_TARGET: ${{ matrix.target }}
+      CARGO_NET_GIT_FETCH_WITH_CLI: "true"
+      RUSTFLAGS: "-D warnings" # fail on warnings
+    defaults:
+      run:
+        shell: bash
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: install protoc
+        uses: arduino/setup-protoc@v3
+        with:
+          version: "25.x"
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: install tools
+        run: |
+          rustup show
+          rustup target add ${{ matrix.target }}
+
+      - name: install other packages required
+        if: matrix.linux-packages
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y ${{ matrix.linux-packages }}
+
+      - uses: Swatinem/rust-cache@v2
+        with:
+          shared-key: "build-${{ matrix.runner }}" # share the cache across jobs on this runner
+
+      - name: build the CLI
+        run: |
+          # If we're on a tag, use the tag name as the release version.
+          if [[ "$GITHUB_REF_TYPE" == 'tag' ]]; then
+            # Ensure that the version specified in Cargo.toml is the same as the tag (with a 'v' prefix).
+ CARGO_VERSION="$(cargo metadata --format-version=1 | jq -r '.packages | .[] | select(.name == "ndc-dynamodb") | .version')" + echo "Git tag: ${GITHUB_REF_NAME}" + echo "Cargo version: ${CARGO_VERSION}" + + if [[ "$GITHUB_REF_NAME" != "v${CARGO_VERSION}" ]]; then + echo >&2 "The Git tag is \"${GITHUB_REF_NAME}\", but the version in Cargo.toml is \"${CARGO_VERSION}\"." + echo >&2 'These must be the same, with a "v" prefix for the tag. Aborting.' + exit 1 + fi + export RELEASE_VERSION="$GITHUB_REF_NAME" + echo "RELEASE_VERSION = ${RELEASE_VERSION}" + fi + + if [[ -n '${{ matrix.linker }}' ]]; then + TARGET_SCREAMING="$(echo '${{ matrix.target }}' | tr '[:lower:]' '[:upper:]' | tr '-' '_')" + echo "CARGO_TARGET_${TARGET_SCREAMING}_LINKER"='${{ matrix.linker }}' + declare "CARGO_TARGET_${TARGET_SCREAMING}_LINKER"='${{ matrix.linker }}' + export "CARGO_TARGET_${TARGET_SCREAMING}_LINKER" + fi + + if [[ -n '${{ matrix.extra-rust-flags }}' ]]; then + RUSTFLAGS="${RUSTFLAGS} ${{ matrix.extra-rust-flags }}" + export RUSTFLAGS + fi + echo "RUSTFLAGS = ${RUSTFLAGS}" + + echo "Building for target: ${CARGO_BUILD_TARGET}" + cargo build --release --package=ndc-dynamodb-cli + + mkdir -p release + mv -v target/${{ matrix.target }}/release/ndc-dynamodb-cli release/ndc-dynamodb-cli-${{ matrix.target }}${{ matrix.extension }} + + - uses: actions/upload-artifact@v4 + with: + name: ndc-dynamodb-cli-${{ matrix.target }} + path: release + if-no-files-found: error + + release: + name: release to GitHub + needs: + - push-docker-images # not strictly necessary, but if this fails, we should abort + - build-cli-binaries + runs-on: ubuntu-latest + # We release when a tag is pushed. + if: startsWith(github.ref, 'refs/tags/v') + steps: + - uses: actions/checkout@v4 + + - uses: actions/download-artifact@v4 + with: + path: release/artifacts + merge-multiple: true + + - name: generate SHA-256 checksums + run: | + cd release/artifacts + sha256sum * > ./sha256sum + + - name: generate a changelog + run: | + ./scripts/release-notes.py "${GITHUB_REF_NAME}" >> release/notes.md + + - name: generate a connector package + run: | + chmod +x ./release/artifacts/ndc-dynamodb-cli-* + mkdir release/package + ./release/artifacts/ndc-dynamodb-cli-x86_64-unknown-linux-gnu --context=release/package initialize --with-metadata + tar vczf release/artifacts/package.tar.gz -C release/package . 
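+          # release/artifacts now holds one ndc-dynamodb-cli-<target> binary per
+          # matrix target, the sha256sum file, and package.tar.gz; the step
+          # below attaches all of them to the draft release.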
+
+      - name: create a draft release
+        uses: ncipollo/release-action@v1
+        with:
+          draft: true
+          bodyFile: release/notes.md
+          artifacts: release/artifacts/*
diff --git a/changelog.md b/changelog.md
new file mode 100644
index 0000000..5e1a9b7
--- /dev/null
+++ b/changelog.md
+# Changelog
+
+## [Unreleased]
+
+### Added
+
+### Changed
+
+### Fixed
+
+## [v0.1.0] - 2024-11-29
+
+### Added
+
+- Initial release with support for ndc-spec v0.1.6
+
+### Changed
+
+### Fixed
+
+
+[Unreleased]: https://github.com/hasura/ndc-dynamodb/compare/v0.1.0...HEAD
+[v0.1.0]: https://github.com/hasura/ndc-dynamodb/releases/tag/v0.1.0
diff --git a/ci/deploy.sh b/ci/deploy.sh
new file mode 100644
index 0000000..55880d2
--- /dev/null
+++ b/ci/deploy.sh
+#!/usr/bin/env bash
+#
+# To get the skopeo dependency automatically, run with:
+#
+#   $ nix run .#publish-docker-image
+#
+set -euo pipefail
+
+DRY_RUN=false
+if [[ "${1:-}" == '-n' || "${1:-}" == '--dry-run' ]]; then
+  DRY_RUN=true
+  echo "$(tput bold)$(tput setaf 1)DRY RUN; some steps will be skipped$(tput sgr0)"
+  shift
+fi
+
+if [[ $# -ne 2 ]]; then
+  echo >&2 "Usage: ${0} [-n|--dry-run] REF IMAGE"
+  echo >&2
+  echo >&2 '  REF should be in the form "refs/heads/<branch>" or "refs/tags/<tag>"'
+  echo >&2 '  (in a GitHub workflow the variable "github.ref" has this format)'
+  echo >&2
+  echo >&2 '  IMAGE is the path of the Docker image, e.g. "ghcr.io/hasura/ndc-dynamodb"'
+  echo >&2
+  echo >&2 '  "--dry-run" will not push anything, but it will still build'
+  exit 1
+fi
+
+github_ref="$1"
+image="$2"
+
+# Runs the given command, unless `--dry-run` was set.
+function run {
+  if "$DRY_RUN"; then
+    echo "$(tput bold)$(tput setaf 1)not running:$(tput sgr0) $*"
+  else
+    echo "$(tput bold)$(tput setaf 2)running:$(tput sgr0) $*"
+    "$@"
+  fi
+}
+
+# Assumes that the given ref is a branch name. Sets a tag for a docker image of
+# the form:
+#
+#     dev-main-bffd555
+#     --- ---- -------
+#      ↑    ↑     ↑
+#      |    |     commit hash
+#      |    branch
+#      prefix "dev"
+#
+# Additionally sets a branch tag, assuming this is the latest tag for the given
+# branch. The branch tag has the form: dev-main
+# Also sets the 'latest' tag.
+# Also sets a tag with just the commit short hash.
function set_dev_tags {
+  local branch="$1"
+  # replace '.' and '/' in branch name with '-'
+  local tidy_branch
+  tidy_branch="$(tr './' '-' <<< "$branch")"
+  local branch_prefix="dev-${tidy_branch}"
+  local version
+  local short_hash
+  short_hash="$(git rev-parse --short=9 HEAD)"
+  version="${branch_prefix}-${short_hash}"
+  export docker_tags=("$version" "$branch_prefix" "$short_hash" "latest")
+}
+
+# The GitHub workflow passes a ref of the form refs/heads/<branch> or
+# refs/tags/<tag>. This function sets an array of docker image tags based on
+# either the given branch or tag name.
+#
+# If a tag name does not start with a "v" (or the ref is neither a branch nor
+# a tag), it is assumed not to be a release tag, and the function falls back
+# to only the "latest" tag.
+#
+# If the input does look like a release tag, set the tag name and "latest" as
+# the docker tags.
+#
+# If the input is a branch, set docker tags via `set_dev_tags`.
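+#
+# Examples (hypothetical inputs and hash):
+#
+#   refs/tags/v0.1.0  -> docker_tags=(v0.1.0 latest)
+#   refs/heads/main   -> docker_tags=(dev-main-abc123def dev-main abc123def latest)
+#   refs/tags/nightly -> docker_tags=(latest)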
+function set_docker_tags { + local input + input="$1" + if [[ $input =~ ^refs/tags/(v.*)$ ]]; then + local tag="${BASH_REMATCH[1]}" + export docker_tags=("$tag" "latest") + elif [[ $input =~ ^refs/heads/(.*)$ ]]; then + local branch="${BASH_REMATCH[1]}" + set_dev_tags "$branch" + else + export docker_tags=("latest") + fi +} + +function publish_multi_arch { + local input + local image_archive + local image_path_for_arch + + architectures=('aarch64' 'x86_64') + + input="$1" + set_docker_tags "$input" + + # do nothing if no tags found + if [[ ${#docker_tags[@]} == 0 ]]; then + echo "The given ref, ${input}, was not a release tag or a branch - will not publish a docker image" + exit + fi + + # build and push the individual images for each architecture + for arch in "${architectures[@]}"; do + # build the docker image + image_archive="docker-archive://$(nix build --print-out-paths ".#docker-${arch}-linux")" + + echo "Will publish docker image with tags: ${docker_tags[*]}" + skopeo inspect "$image_archive" + + image_path_for_arch="${image}-${arch}" + for tag in "${docker_tags[@]}"; do + echo + echo "Pushing docker://${image_path_for_arch}:${tag}" + run skopeo copy "$image_archive" "docker://${image_path_for_arch}:${tag}" + done + done + + # now create and push the manifest + for tag in "${docker_tags[@]}"; do + echo "Creating manifest for ${image}:${tag}" + # create a manifest referencing both architectures + # i did not use a loop here, forgive me + run docker manifest create \ + "$image:$tag" \ + --amend "${image}-aarch64:${tag}" \ + --amend "${image}-x86_64:${tag}" + + # push manifest as the main image + run docker manifest push "${image}:${tag}" + done +} + +publish_multi_arch "$github_ref" diff --git a/crates/configuration/src/version1.rs b/crates/configuration/src/version1.rs index 29fade5..e1f8481 100644 --- a/crates/configuration/src/version1.rs +++ b/crates/configuration/src/version1.rs @@ -127,7 +127,7 @@ pub async fn introspect( let client = aws_sdk_dynamodb::Client::from_conf(config); let tables_result = client.list_tables().send().await; // dbg!(&tables_result); - let tables = tables_result.map_err(|op| { + let tables = tables_result.map_err(|_op| { ParseConfigurationError::IoErrorButStringified(format!( "Failed to list tables:", // op.error_message.unwrap() @@ -175,7 +175,7 @@ pub async fn introspect( } //get non key attributes - let mut result = client + let result = client .execute_statement() .statement( format!( diff --git a/crates/query-engine/execution/src/query.rs b/crates/query-engine/execution/src/query.rs index 059cfd3..239df5f 100644 --- a/crates/query-engine/execution/src/query.rs +++ b/crates/query-engine/execution/src/query.rs @@ -1,13 +1,13 @@ //! Execute an execution plan against the database. -use std::{collections::{hash_map, HashMap}, hash::Hash, process::exit, vec}; +use std::{collections::HashMap, vec}; use crate::error::Error; use crate::metrics; use bytes::{BufMut, Bytes, BytesMut}; use query_engine_sql::sql::string::Param; use serde_json::{self, to_string, Value}; -use aws_sdk_dynamodb::{types::AttributeValue, Client}; +use aws_sdk_dynamodb::Client; use query_engine_sql::sql; @@ -24,15 +24,9 @@ pub async fn execute( // element in the vector is the result of running the query on one set of variables. 
match plan.query.variables { None => { - // TODO: need to parse this from service account key or allow user to provide it - // TODO(PY) - // let project_id = "hasura-development"; - - // let mut inner_rows = vec![]; - - let mut query_request = (plan.query.query_sql().sql); + let query_request = (plan.query.query_sql().sql); dbg!(&query_request); - let temp_query = "select * from test"; + // let temp_query = "select * from test"; let query_limit: Option = plan.query.limit.map(|limit| limit as i32); // smash query.params in here pls @@ -68,10 +62,10 @@ pub async fn execute( // .collect(), // ); - let temp_param: Option> = Some(vec![AttributeValue::S("foo".to_string())]); + // let temp_param: Option> = Some(vec![AttributeValue::S("foo".to_string())]); // Query - let mut rs = client + let rs = client .execute_statement() .statement( format!( @@ -79,7 +73,7 @@ pub async fn execute( query_request ) ) - .set_parameters(temp_param) + .set_parameters(None) .set_limit(query_limit) .send() .await diff --git a/docker-compose.yaml b/docker-compose.yaml new file mode 100644 index 0000000..a250fdb --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,46 @@ +version: "3.6" + +services: + prometheus: + image: prom/prometheus + container_name: prometheus + command: + - "--config.file=/etc/prometheus/prometheus.yml" + ports: + - 9090:9090 + restart: unless-stopped + volumes: + - type: bind + source: ./metrics/prometheus/prometheus.yml + target: /etc/prometheus/prometheus.yml + - type: volume + source: prom_data + target: /prometheus + + grafana: + image: grafana/grafana + container_name: grafana + ports: + - 3001:3000 + restart: unless-stopped + environment: + - GF_SECURITY_ADMIN_USER=admin + - GF_SECURITY_ADMIN_PASSWORD=grafana + volumes: + - type: bind + source: ./metrics/grafana + target: /etc/grafana/provisioning/datasources + read_only: true + - type: bind + source: ./metrics/grafana/dashboard.yaml + target: /etc/grafana/provisioning/dashboards/main.yaml + - type: bind + source: ./metrics/grafana/dashboards + target: /var/lib/grafana/dashboards + - type: volume + source: grafana_data + target: /var/lib/grafana + +volumes: + prom_data: + grafana_data: diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000..cc3b519 --- /dev/null +++ b/flake.lock @@ -0,0 +1,97 @@ +{ + "nodes": { + "crane": { + "locked": { + "lastModified": 1725409566, + "narHash": "sha256-PrtLmqhM6UtJP7v7IGyzjBFhbG4eOAHT6LPYOFmYfbk=", + "owner": "ipetkov", + "repo": "crane", + "rev": "7e4586bad4e3f8f97a9271def747cf58c4b68f3c", + "type": "github" + }, + "original": { + "owner": "ipetkov", + "repo": "crane", + "type": "github" + } + }, + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1726560853, + "narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1726820224, + "narHash": "sha256-CRi4Hm+gp75tL2Ti2iSuuoPn2dfxdxeqlllz00PVHvU=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "98a31cdb8d64cc0d260b9ba6260ca9502bf5c944", + "type": "github" + }, + "original": { + "owner": "NixOS", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "crane": "crane", + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs", + "rust-overlay": "rust-overlay" + } + }, + "rust-overlay": { + "inputs": { + "nixpkgs": 
[ + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1726799228, + "narHash": "sha256-wn1leQyMAc/TrLRKcPc0GX6YtoziKQpc/MtZjPNiJ2Q=", + "owner": "oxalica", + "repo": "rust-overlay", + "rev": "ab150c7412db7bea5879ce2776718f53fba37aa2", + "type": "github" + }, + "original": { + "owner": "oxalica", + "repo": "rust-overlay", + "type": "github" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000..9493a45 --- /dev/null +++ b/flake.nix @@ -0,0 +1,148 @@ +{ + description = "DynamoDB data connector"; + + inputs = { + nixpkgs.url = "github:NixOS/nixpkgs?branch=nixos-unstable"; + flake-utils.url = "github:numtide/flake-utils"; + + crane = { + url = "github:ipetkov/crane"; + inputs.nixpkgs.follows = "nixpkgs"; + }; + + rust-overlay = { + url = "github:oxalica/rust-overlay"; + inputs = { + nixpkgs.follows = "nixpkgs"; + }; + }; + }; + + outputs = { self, nixpkgs, flake-utils, crane, rust-overlay }: + flake-utils.lib.eachDefaultSystem (localSystem: + let + pkgs = import nixpkgs { + system = localSystem; + overlays = [ rust-overlay.overlays.default ]; + }; + + rust = import ./nix/rust.nix { + inherit nixpkgs rust-overlay crane localSystem; + }; + in + { + packages = { + # a binary for whichever is the local computer + default = rust.callPackage ./nix/app.nix { }; + + # cross compiler an x86_64 linux binary + x86_64-linux = (import ./nix/rust.nix { + inherit nixpkgs rust-overlay crane localSystem; + crossSystem = "x86_64-linux"; + }).callPackage ./nix/app.nix + { }; + # cross compile a aarch64 linux binary + aarch64-linux = (import ./nix/rust.nix { + inherit nixpkgs rust-overlay crane localSystem; + crossSystem = "aarch64-linux"; + }).callPackage ./nix/app.nix + { }; + + # docker for local system + docker = pkgs.callPackage ./nix/docker.nix { + package = self.packages.${localSystem}.default; + image-name = "ghcr.io/hasura/ndc-dynamodb"; + tag = "dev"; + }; + # docker for x86_64-linux + docker-x86_64-linux = pkgs.callPackage ./nix/docker.nix { + package = self.packages.${localSystem}.x86_64-linux; + architecture = "amd64"; + image-name = "ghcr.io/hasura/ndc-dynamodb-x86_64"; + }; + # docker for aarch64-linux + docker-aarch64-linux = pkgs.callPackage ./nix/docker.nix { + package = self.packages.${localSystem}.aarch64-linux; + architecture = "arm64"; + image-name = "ghcr.io/hasura/ndc-dynamodb-aarch64"; + }; + + publish-docker-image = pkgs.writeShellApplication { + name = "publish-docker-image"; + runtimeInputs = with pkgs; [ coreutils skopeo ]; + text = builtins.readFile ./ci/deploy.sh; + }; + }; + + apps = { + default = self.apps.${localSystem}.connector; + + connector = { + type = "app"; + program = "${self.packages.${localSystem}.default}/bin/ndc-dynamodb"; + }; + + cli = { + type = "app"; + program = "${self.packages.${localSystem}.default}/bin/ndc-dynamodb-cli"; + }; + }; + + checks = { + # Build the crate as part of `nix flake check` + ndc-dynamodb = self.packages.${localSystem}.default; + }; + + formatter = pkgs.nixpkgs-fmt; + + devShells = { + default = pkgs.mkShell { + # include dependencies of the default package + inputsFrom = [ self.packages.${localSystem}.default ]; + + # build-time inputs + 
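+          # (this is the shell entered by `nix develop`, which the
+          # nix-check.yaml workflow smoke-tests with `nix develop --command true`)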
nativeBuildInputs = [ + # Development + pkgs.just + pkgs.nixpkgs-fmt + pkgs.nodePackages.prettier + pkgs.moreutils + + # Rust + pkgs.bacon + pkgs.cargo-audit + pkgs.cargo-edit + pkgs.cargo-expand + pkgs.cargo-flamegraph + pkgs.cargo-insta + pkgs.cargo-machete + pkgs.cargo-nextest + pkgs.cargo-watch + rust.rustToolchain + + # Benchmarks + pkgs.k6 + + # Deployment + pkgs.skopeo + ]; + }; + } // pkgs.lib.attrsets.optionalAttrs pkgs.hostPlatform.isLinux { + # This performance-testing shell will only work on Linux. + perf = pkgs.mkShell { + inputsFrom = [ + self.devShells.${localSystem}.default + ]; + + # build-time inputs + nativeBuildInputs = [ + pkgs.heaptrack + pkgs.linuxPackages_latest.perf + pkgs.mold-wrapped + pkgs.valgrind + ]; + }; + }; + } + ); +} diff --git a/justfile b/justfile new file mode 100644 index 0000000..77e0564 --- /dev/null +++ b/justfile @@ -0,0 +1,233 @@ +set shell := ["bash", "-c"] + +CONNECTOR_IMAGE_NAME := "ghcr.io/hasura/ndc-postgres" +CONNECTOR_IMAGE_TAG := "dev" +CONNECTOR_IMAGE := CONNECTOR_IMAGE_NAME + ":" + CONNECTOR_IMAGE_TAG + +POSTGRESQL_CONNECTION_STRING := "postgresql://postgres:password@localhost:64002" +POSTGRES_CHINOOK_DEPLOYMENT := "static/chinook-deployment.json" + +COCKROACH_CONNECTION_STRING := "postgresql://postgres:password@localhost:64003" +COCKROACH_CHINOOK_DEPLOYMENT := "static/cockroach/chinook-deployment.json" + +CITUS_CONNECTION_STRING := "postgresql://postgres:password@localhost:64004?sslmode=disable" +CITUS_CHINOOK_DEPLOYMENT := "static/citus/chinook-deployment.json" + +AURORA_CONNECTION_STRING := env_var_or_default('AURORA_CONNECTION_STRING', '') +AURORA_CHINOOK_DEPLOYMENT := "static/aurora/chinook-deployment.json" +AURORA_CHINOOK_DEPLOYMENT_TEMPLATE := "static/aurora/chinook-deployment-template.json" + +# Notes: +# * Building Docker images will not work on macOS. +# You can use `main` instead, by running: +# just --set CONNECTOR_IMAGE_TAG dev-main + +# check everything +check: format-check build lint test + +# run the connector +run: start-dependencies + RUST_LOG=INFO \ + OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://localhost:4317 \ + OTEL_SERVICE_NAME=postgres-ndc \ + cargo run --bin ndc-postgres --release -- serve --configuration {{POSTGRES_CHINOOK_DEPLOYMENT}} > /tmp/ndc-postgres.log + +# run the connector inside a Docker image +run-in-docker: build-docker-with-nix start-dependencies + #!/usr/bin/env bash + set -e -u -o pipefail + + configuration_file="$(mktemp)" + trap 'rm -f "$configuration_file"' EXIT + + echo '> Generating the configuration...' + docker run \ + --name=postgres-ndc-configuration \ + --rm \ + --detach \ + --platform=linux/amd64 \ + --net='postgres-ndc_default' \ + --publish='9100:9100' \ + {{CONNECTOR_IMAGE}} \ + configuration serve + trap 'docker stop postgres-ndc-configuration' EXIT + CONFIGURATION_SERVER_URL='http://localhost:9100/' + ./scripts/wait-until --timeout=30 --report -- nc -z localhost 9100 + curl -fsS "$CONFIGURATION_SERVER_URL" \ + | jq --arg connection_uris 'postgresql://postgres:password@postgres' '. + {"connection_uris": $connection_uris}' \ + | curl -fsS "$CONFIGURATION_SERVER_URL" -H 'Content-Type: application/json' -d @- \ + > "$configuration_file" + + echo '> Starting the server...' 
+    docker run \
+      --name=postgres-ndc \
+      --rm \
+      --interactive \
+      --tty \
+      --platform=linux/amd64 \
+      --net='postgres-ndc_default' \
+      --publish='8100:8100' \
+      --env=RUST_LOG='INFO' \
+      --mount="type=bind,source=${configuration_file},target=/deployment.json,readonly=true" \
+      {{CONNECTOR_IMAGE}} \
+      serve \
+      --configuration='/deployment.json'
+
+# watch the code, then test and re-run on changes
+dev: start-dependencies
+    RUST_LOG=INFO \
+      OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://localhost:4317 \
+      OTEL_SERVICE_NAME=postgres-ndc \
+      cargo watch -i "**/snapshots/*" \
+      -c \
+      -x 'test -p query-engine-translation -p ndc-postgres' \
+      -x clippy \
+      -x 'run --bin ndc-postgres -- serve --configuration {{POSTGRES_CHINOOK_DEPLOYMENT}}'
+
+# watch the code, and re-run on changes
+watch-run: start-dependencies
+    RUST_LOG=DEBUG \
+      cargo watch -i "tests/snapshots/*" \
+      -c \
+      -x 'run --bin ndc-postgres -- serve --configuration {{POSTGRES_CHINOOK_DEPLOYMENT}}'
+
+# Run ndc-postgres with rust-gdb.
+debug: start-dependencies
+    cargo build
+    RUST_LOG=DEBUG \
+      rust-gdb --args target/debug/ndc-postgres serve --configuration {{POSTGRES_CHINOOK_DEPLOYMENT}}
+
+# Run the server and produce a flamegraph profile
+flamegraph: start-dependencies
+    RUST_LOG=DEBUG \
+      cargo flamegraph --bin ndc-postgres --dev -- \
+      serve --configuration {{POSTGRES_CHINOOK_DEPLOYMENT}} > /tmp/ndc-postgres.log
+
+# build everything
+build:
+    cargo build --all-targets
+
+# build and open docs
+doc:
+    cargo doc --lib --no-deps --open
+
+# run all tests
+test: start-dependencies create-aurora-deployment
+    #!/usr/bin/env bash
+    # enable the "aurora" feature if the connection string is set
+    features=()
+    if [[ -n '{{AURORA_CONNECTION_STRING}}' ]]; then
+      features+=(--features aurora)
+    else
+      echo "$(tput bold)$(tput setaf 3)WARNING:$(tput sgr0) Skipping the Aurora tests because the connection string is unset."; \
+    fi
+    echo "$(tput bold)cargo test ${features[@]}$(tput sgr0)"
+    RUST_LOG=DEBUG cargo test "${features[@]}"
+
+# re-generate the deployment configuration file
+generate-chinook-configuration: build start-dependencies
+    ./scripts/generate-chinook-configuration.sh 'ndc-postgres' '{{POSTGRESQL_CONNECTION_STRING}}' '{{POSTGRES_CHINOOK_DEPLOYMENT}}'
+    ./scripts/generate-chinook-configuration.sh 'ndc-citus' '{{CITUS_CONNECTION_STRING}}' '{{CITUS_CHINOOK_DEPLOYMENT}}'
+    ./scripts/generate-chinook-configuration.sh 'ndc-cockroach' '{{COCKROACH_CONNECTION_STRING}}' '{{COCKROACH_CHINOOK_DEPLOYMENT}}'
+    @ if [[ -n '{{AURORA_CONNECTION_STRING}}' ]]; then \
+      echo "$(tput bold)./scripts/generate-chinook-configuration.sh 'ndc-postgres' '{{AURORA_CONNECTION_STRING}}' '{{AURORA_CHINOOK_DEPLOYMENT_TEMPLATE}}'$(tput sgr0)"; \
+      ./scripts/generate-chinook-configuration.sh "ndc-postgres" '{{AURORA_CONNECTION_STRING}}' '{{AURORA_CHINOOK_DEPLOYMENT_TEMPLATE}}'; \
+      just create-aurora-deployment; \
+    else \
+      echo "$(tput bold)$(tput setaf 3)WARNING:$(tput sgr0) Not updating the Aurora configuration because the connection string is unset."; \
+    fi
+
+# run postgres + jaeger
+start-dependencies:
+    # start jaeger, configured to listen to V3
+    docker compose -f ../v3-engine/docker-compose.yaml up -d jaeger
+
+# setup aurora + jaeger
+# aurora is a bit different: the 'setup' step takes the
+# `AURORA_CONNECTION_STRING` and inserts it into a new copy of the deployment
+create-aurora-deployment:
+    # start jaeger, configured to listen to V3
+    docker compose -f ../v3-engine/docker-compose.yaml up -d jaeger
+    # splice `AURORA_CONNECTION_STRING`
into + cat {{ AURORA_CHINOOK_DEPLOYMENT_TEMPLATE }} \ + | jq '.connection_uris[0] =(env | .AURORA_CONNECTION_STRING)' \ + | prettier --parser=json \ + > {{ AURORA_CHINOOK_DEPLOYMENT }} + +# run prometheus + grafana +start-metrics: + @echo "http://localhost:3001/ for grafana console" + docker compose up --wait prometheus grafana + +# run the v3 engine binary, pointing it at our connector +run-engine: start-dependencies + @echo "http://localhost:3000/ for graphiql console" + @echo "http://localhost:4002/ for jaeger console" + # Run graphql-engine using static Chinook metadata + # we expect the `v3-engine` repo to live next door to this one + RUST_LOG=DEBUG cargo run --release \ + --manifest-path ../v3-engine/Cargo.toml \ + --bin engine -- \ + --metadata-path ./static/chinook-metadata.json + +# start a postgres docker image and connect to it using psql +repl-postgres: + @docker compose up --wait postgres + psql {{POSTGRESQL_CONNECTION_STRING}} + +# start a cockroach docker image and connect to it using psql +repl-cockroach: + @docker compose up --wait cockroach + psql {{COCKROACH_CONNECTION_STRING}} + +# start a citus docker image and connect to it using psql +repl-citus: + @docker compose up --wait citus + psql {{CITUS_CONNECTION_STRING}} + +# run `clippy` linter +lint *FLAGS: + cargo clippy {{FLAGS}} + +lint-apply *FLAGS: + cargo clippy --fix {{FLAGS}} + +# reformat everything +format: + cargo fmt --all + ! command -v nixpkgs-fmt > /dev/null || nixpkgs-fmt . + prettier --write . + +# is everything formatted? +format-check: + cargo fmt --all --check + ! command -v nixpkgs-fmt > /dev/null || nixpkgs-fmt --check . + prettier --check . + +# check the nix builds work +build-with-nix: + nix build --no-warn-dirty --print-build-logs '.#ndc-postgres' '.#ndc-cockroach' '.#ndc-citus' + +# check the docker build works +build-docker-with-nix: + #!/usr/bin/env bash + if [[ '{{CONNECTOR_IMAGE_TAG}}' == 'dev' ]]; then + echo "$(tput bold)nix build .#ndc-postgres-docker | gunzip | docker load$(tput sgr0)" + gunzip < "$(nix build --no-warn-dirty --no-link --print-out-paths '.#ndc-postgres-docker')" | docker load + fi + +# check the Postgres arm64 docker build works +build-aarch64-docker-with-nix: + #!/usr/bin/env bash + if [[ '{{CONNECTOR_IMAGE_TAG}}' == 'dev' ]]; then + echo "$(tput bold)nix build .#ndc-postgres-docker-aarch64-linux | gunzip | docker load$(tput sgr0)" + gunzip < "$(nix build --no-warn-dirty --no-link --print-out-paths --system aarch64-linux '.#ndc-postgres-docker-aarch64-linux')" | docker load + fi + +# check the Cockroach arm64 docker build works +build-cockroach-aarch64-docker-with-nix: + #!/usr/bin/env bash + if [[ '{{CONNECTOR_IMAGE_TAG}}' == 'dev' ]]; then + echo "$(tput bold)nix build .#ndc-cockroach-docker-aarch64-linux | gunzip | docker load$(tput sgr0)" + gunzip < "$(nix build --no-warn-dirty --no-link --print-out-paths --system aarch64-linux '.#ndc-cockroach-docker-aarch64-linux')" | docker load + fi diff --git a/metrics/grafana/dashboard.yaml b/metrics/grafana/dashboard.yaml new file mode 100644 index 0000000..81f9295 --- /dev/null +++ b/metrics/grafana/dashboard.yaml @@ -0,0 +1,12 @@ +apiVersion: 1 + +providers: + - name: "Dashboard provider" + orgId: 1 + type: file + disableDeletion: false + updateIntervalSeconds: 10 + allowUiUpdates: true + options: + path: /var/lib/grafana/dashboards + foldersFromFilesStructure: true diff --git a/metrics/grafana/dashboards/postgres-ndc.json b/metrics/grafana/dashboards/postgres-ndc.json new file mode 100644 index 0000000..8be67fb --- 
/dev/null +++ b/metrics/grafana/dashboards/postgres-ndc.json @@ -0,0 +1,643 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": 1, + "links": [], + "liveNow": false, + "panels": [ + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 9, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 5, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "10.1.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "rate(postgres_ndc_connection_acquisition_wait_time_sum[$__rate_interval])", + "fullMetaSearch": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Total connection acquisition wait time", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 14, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 12, + "x": 12, + "y": 0 + }, + "id": 3, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": 
"rate(postgres_ndc_query_execution_time_sum[$__rate_interval])", + "fullMetaSearch": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Total query execution time", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 12, + "x": 0, + "y": 10 + }, + "id": 2, + "options": { + "displayMode": "gradient", + "minVizHeight": 10, + "minVizWidth": 0, + "orientation": "auto", + "reduceOptions": { + "calcs": ["lastNotNull"], + "fields": "", + "values": false + }, + "showUnfilled": true, + "valueMode": "color" + }, + "pluginVersion": "10.1.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "postgres_ndc_connection_acquisition_wait_time_bucket", + "format": "heatmap", + "fullMetaSearch": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Connection acquisition wait time", + "type": "bargauge" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "{instance=\"host.docker.internal:8100\", job=\"postgres-ndc\", le=\"0.005\"}" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "yellow", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 10, + "w": 12, + "x": 12, + "y": 10 + }, + "id": 6, + "options": { + "displayMode": "gradient", + "minVizHeight": 10, + "minVizWidth": 0, + "orientation": "auto", + "reduceOptions": { + "calcs": ["last"], + "fields": "", + "values": false + }, + "showUnfilled": true, + "valueMode": "color" + }, + "pluginVersion": "10.1.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "postgres_ndc_query_execution_time_bucket", + "format": "heatmap", + "fullMetaSearch": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Query execution time", + "transparent": true, + "type": "bargauge" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { 
+ "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 20 + }, + "id": 1, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "rate(postgres_ndc_query_total[1m])", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "postgres_ndc_explain_total", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "Requests over time", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 14, + "gradientMode": "opacity", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 20 + }, + "id": 4, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "postgres_ndc_pool_size", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "postgres_ndc_pool_max_connections", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "B", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "postgres_ndc_pool_min_connections", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "C", + "useBackend": false + } 
+ ], + "title": "Connection pool size", + "type": "timeseries" + } + ], + "refresh": "", + "schemaVersion": 38, + "style": "dark", + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-15m", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "Postgres NDC", + "uid": "fa23d46b-326f-4e0a-aa3b-0495f5f984d5", + "version": 1, + "weekStart": "" +} diff --git a/metrics/grafana/datasource.yml b/metrics/grafana/datasource.yml new file mode 100644 index 0000000..44999d4 --- /dev/null +++ b/metrics/grafana/datasource.yml @@ -0,0 +1,9 @@ +apiVersion: 1 + +datasources: + - name: Prometheus + type: prometheus + url: http://prometheus:9090 + isDefault: true + access: proxy + editable: true diff --git a/metrics/prometheus/prometheus.yml b/metrics/prometheus/prometheus.yml new file mode 100644 index 0000000..804d32b --- /dev/null +++ b/metrics/prometheus/prometheus.yml @@ -0,0 +1,30 @@ +global: + scrape_interval: 15s + scrape_timeout: 10s + evaluation_interval: 15s +alerting: + alertmanagers: + - static_configs: + - targets: [] + scheme: http + timeout: 10s + api_version: v1 +scrape_configs: + - job_name: prometheus + honor_timestamps: true + scrape_interval: 15s + scrape_timeout: 10s + metrics_path: /metrics + scheme: http + static_configs: + - targets: + - localhost:9090 + - job_name: postgres-ndc + honor_timestamps: true + scrape_interval: 15s + scrape_timeout: 10s + metrics_path: /metrics + scheme: http + static_configs: + - targets: + - host.docker.internal:8100 diff --git a/nix/app.nix b/nix/app.nix new file mode 100644 index 0000000..b04654a --- /dev/null +++ b/nix/app.nix @@ -0,0 +1,56 @@ +# This is a function that returns a derivation for the compiled Rust project. +{ craneLib +, lib +, hostPlatform +, openssl +, libiconv +, pkg-config +, protobuf +, darwin +}: +let + buildArgs = { + pname = "ndc-dynamodb"; + version = "0.1.0"; + + src = + let + isJsonFile = path: _type: builtins.match ".*json" path != null; + isSqlFile = path: _type: builtins.match ".*sql" path != null; + isSourceFile = path: type: + isJsonFile path type + || isSqlFile path type + || craneLib.filterCargoSources path type; + in + lib.cleanSourceWith { src = craneLib.path ./..; filter = isSourceFile; }; + + strictDeps = true; + + # build-time inputs + nativeBuildInputs = [ + openssl.dev # required to build Rust crates that can conduct TLS connections + pkg-config # required to find OpenSSL + ]; + + # runtime inputs + buildInputs = [ + openssl # required for TLS connections + protobuf # required by opentelemetry-proto, a dependency of axum-tracing-opentelemetry + ] ++ lib.optionals hostPlatform.isDarwin [ + # macOS-specific dependencies + libiconv + darwin.apple_sdk.frameworks.CoreFoundation + darwin.apple_sdk.frameworks.Security + darwin.apple_sdk.frameworks.SystemConfiguration + ]; + }; + + # Build the dependencies first. + cargoArtifacts = craneLib.buildDepsOnly buildArgs; +in +# Then build the crate. +craneLib.buildPackage + (buildArgs // { + inherit cargoArtifacts; + doCheck = false; + }) diff --git a/nix/docker.nix b/nix/docker.nix new file mode 100644 index 0000000..437a4a3 --- /dev/null +++ b/nix/docker.nix @@ -0,0 +1,47 @@ +# This is a function that returns a derivation for a docker image. +{ dockerTools +, lib +, package +, image-name +, pkgs +, architecture ? null +, tag ? null # defaults to the output hash +, extraConfig ? 
{ } # see config options at: https://github.com/moby/moby/blob/master/image/spec/v1.2.md#image-json-field-descriptions +}: +let + seconds = 1000 * 1000 * 1000; # nanoseconds in 1 second + args = { + name = image-name; + created = "now"; + contents = [ pkgs.cacert package ]; + config = { + Entrypoint = [ + "/bin/${package.pname}" + ]; + Cmd = [ + "serve" + ]; + Env = [ + ''HASURA_CONFIGURATION_DIRECTORY=/etc/connector'' + ]; + ExposedPorts = { "8080/tcp" = { }; }; + Healthcheck = { + Test = [ + "CMD" + "/bin/${package.pname}" + "check-health" + ]; + StartInterval = 1 * seconds; + Interval = 5 * seconds; + Timeout = 10 * seconds; + Retries = 3; + }; + } // extraConfig; + } + // lib.optionalAttrs (tag != null) { + inherit tag; + } // lib.optionalAttrs (architecture != null) { + inherit architecture; + }; +in +dockerTools.buildLayeredImage args diff --git a/nix/rust.nix b/nix/rust.nix new file mode 100644 index 0000000..cba7ccd --- /dev/null +++ b/nix/rust.nix @@ -0,0 +1,59 @@ +# Sets up our Rust toolchain and Crane for cross-compilation. +# This is mostly a copy of the example provided at: +# https://crane.dev/examples/cross-rust-overlay.html +{ nixpkgs +, rust-overlay +, crane +, localSystem +, crossSystem ? localSystem +}: +let + pkgs = import nixpkgs { + inherit crossSystem localSystem; + overlays = [ rust-overlay.overlays.default ]; + }; + + lib = pkgs.pkgsBuildHost.lib; + + # Converts host system string for use in environment variable names + envCase = triple: lib.strings.toUpper (builtins.replaceStrings [ "-" ] [ "_" ] triple); + + # `hostPlatform` is the cross-compilation output platform; + # `buildPlatform` is the platform we are compiling on + buildPlatform = pkgs.stdenv.buildPlatform; + hostPlatform = pkgs.stdenv.hostPlatform; + + # When possibly cross-compiling we get several versions of nixpkgs of the + # form, `pkgs.pkgs`. We use + # `pkgs.pkgsBuildHost` to get packages that run at build time (so run on the + # build platform), and that produce outputs for the host platform which is the + # cross-compilation target. + rustBin = pkgs.pkgsBuildHost.rust-bin.fromRustupToolchainFile ../rust-toolchain.toml; + rustToolchain = rustBin.override { targets = [ hostPlatform.config ]; }; + craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain; + + buildEnv = { + CARGO_BUILD_TARGET = hostPlatform.config; + "CARGO_TARGET_${envCase hostPlatform.config}_LINKER" = "${pkgs.stdenv.cc.targetPrefix}cc"; + + # This environment variable may be necessary if any of your dependencies use + # a build-script which invokes the `cc` crate to build some other code. The + # `cc` crate should automatically pick up on our target-specific linker + # above, but this may be necessary if the build script needs to compile and + # run some extra code on the build system. + HOST_CC = "${pkgs.stdenv.cc.nativePrefix}cc"; + }; +in +{ + inherit rustToolchain; + + callPackage = (package: args: + # Call the package, providing `craneLib` as an extra. + let crate = pkgs.callPackage package (args // { inherit craneLib; }); + in + # Override the derivation to add cross-compilation environment variables. + crate.overrideAttrs (previous: buildEnv // { + # We also have to override the `cargoArtifacts` derivation with the same changes. 
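+      # (buildEnv sets CARGO_BUILD_TARGET and the target-specific linker variable,
+      # e.g. CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER when crossSystem is
+      # "aarch64-linux", so the dependency-only build links the same way)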
+
+    # This environment variable may be necessary if any of your dependencies use
+    # a build-script which invokes the `cc` crate to build some other code. The
+    # `cc` crate should automatically pick up on our target-specific linker
+    # above, but this may be necessary if the build script needs to compile and
+    # run some extra code on the build system.
+    HOST_CC = "${pkgs.stdenv.cc.nativePrefix}cc";
+  };
+in
+{
+  inherit rustToolchain;
+
+  callPackage = (package: args:
+    # Call the package, providing `craneLib` as an extra.
+    let crate = pkgs.callPackage package (args // { inherit craneLib; });
+    in
+    # Override the derivation to add cross-compilation environment variables.
+    crate.overrideAttrs (previous: buildEnv // {
+      # We also have to override the `cargoArtifacts` derivation with the same changes.
+      cargoArtifacts = previous.cargoArtifacts.overrideAttrs (previous: buildEnv);
+    }));
+}
diff --git a/rust-toolchain.toml b/rust-toolchain.toml
new file mode 100644
index 0000000..71eb619
--- /dev/null
+++ b/rust-toolchain.toml
+[toolchain]
+channel = "1.78.0"
+profile = "default" # see https://rust-lang.github.io/rustup/concepts/profiles.html
+components = ["rust-analyzer", "rust-src"] # see https://rust-lang.github.io/rustup/concepts/components.html
diff --git a/scripts/generate-chinook-configuration.sh b/scripts/generate-chinook-configuration.sh
new file mode 100755
index 0000000..c649474
--- /dev/null
+++ b/scripts/generate-chinook-configuration.sh
+#!/usr/bin/env bash
+set -e -u -o pipefail
+
+EXECUTABLE="$1"
+CONNECTION_STRING="$2"
+CHINOOK_DEPLOYMENT="$3"
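+
+# Illustrative invocation (the binary name and paths are examples, not fixed):
+#   ./scripts/generate-chinook-configuration.sh ndc-dynamodb \
+#     "$CONNECTION_STRING" static/configuration.json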
+
+# ensure we clean up
+function stop {
+  if [[ "${CONFIGURATION_SERVER_PID+x}" ]]; then
+    kill "$CONFIGURATION_SERVER_PID"
+  fi
+  if [[ "${NEW_FILE+x}" ]]; then
+    rm -f "$NEW_FILE"
+  fi
+}
+
+trap stop EXIT
+
+# start the configuration server
+cargo run --bin "$EXECUTABLE" --quiet -- configuration serve &
+CONFIGURATION_SERVER_PID=$!
+./scripts/wait-until --timeout=30 --report -- nc -z localhost 9100
+if ! kill -0 "$CONFIGURATION_SERVER_PID"; then
+  echo >&2 'The server stopped abruptly.'
+  exit 1
+fi
+
+# grab .connection_uris and .metadata.native_queries from the current file
+PRESERVED_DATA="$(jq '{"connection_uris": .connection_uris, "metadata": {"native_queries": .metadata.native_queries}}' "$CHINOOK_DEPLOYMENT")"
+
+# create a temporary file for the output so we don't overwrite data by accident
+NEW_FILE="$(mktemp)"
+
+# 1. Pass the connection string to the configuration server to generate the
+# initial deployment from introspection
+# 2. Splice in the preserved data from above
+# 3. Format the file
+#
+# Because we `set -o pipefail` above, this will fail if any of the steps fail,
+# and we will abort without overwriting the original file.
+curl -fsS http://localhost:9100 \
+  | jq \
+    --arg connection_uris "$CONNECTION_STRING" \
+    '. + {"connection_uris": [$connection_uris], "version": 1, "metadata": {}, "aggregate_functions": {}}' \
+  | curl -fsS http://localhost:9100 -H 'Content-Type: application/json' -d @- \
+  | jq --argjson preserved_data "$PRESERVED_DATA" '. * $preserved_data' \
+  | prettier --parser=json \
+  > "$NEW_FILE"
+
+# If the above command succeeded, overwrite the original file.
+mv -f "$NEW_FILE" "$CHINOOK_DEPLOYMENT"
diff --git a/scripts/release-notes.py b/scripts/release-notes.py
new file mode 100755
index 0000000..1eb6816
--- /dev/null
+++ b/scripts/release-notes.py
+#!/usr/bin/env python3
+
+# Generates the release notes for a GitHub release automatically.
+#
+# These notes consist of:
+# 1. the Docker image name
+# 2. the part of the changelog corresponding to the given version
+
+import argparse
+import sys
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("version")
+    args = parser.parse_args()
+
+    print_docker_image(args.version)
+    print_changelog_for_version(args.version)
+
+def print_docker_image(version):
+    print('The connector Docker image is:')
+    print('')
+    print(f'    ghcr.io/hasura/ndc-dynamodb:{version}')
+    print('')
+
+def print_changelog_for_version(version):
+    recording = False
+    changelog_lines = []
+    with open("changelog.md") as file:
+        for line in file:
+            line = line.rstrip()  # remove trailing spaces and newline
+            # start recording lines when we find the entry corresponding to the
+            # given version
+            if line.startswith(f"## [{version}]"):
+                recording = True
+                continue
+            if recording:
+                # stop when we hit the next section; reaching the end of the
+                # file ends the loop on its own (blank lines within the entry
+                # are kept, and trimmed below)
+                if line.startswith("## "):
+                    break
+                changelog_lines.append(line)
+
+    # discard blank lines at the start or end
+    try:
+        while changelog_lines[0] == "":
+            changelog_lines.pop(0)
+        while changelog_lines[-1] == "":
+            changelog_lines.pop()
+    except IndexError:
+        pass
+
+    # if it's empty, we have failed
+    if not changelog_lines:
+        print(f"Could not find a changelog for version {version}.", file=sys.stderr)
+        sys.exit(1)
+
+    # print the result
+    print('## Changelog')
+    print()
+    for line in changelog_lines:
+        print(line)
+
+if __name__ == '__main__':
+    main()
diff --git a/scripts/wait-until b/scripts/wait-until
new file mode 100755
index 0000000..d5cdb08
--- /dev/null
+++ b/scripts/wait-until
+#!/usr/bin/env python3
+
+import argparse
+import subprocess
+import sys
+import time
+
+REPORT_DELAY = 10
+
+parser = argparse.ArgumentParser(
+    description='Repeatedly tries the provided command until it succeeds.')
+parser.add_argument(
+    '-d', '--delay',
+    type=int, default=1,
+    help='delay between retries, in seconds')
+parser.add_argument(
+    '-t', '--timeout',
+    type=int,
+    help='maximum amount of time to wait, in seconds, before giving up')
+parser.add_argument(
+    '-r', '--report',
+    action='store_true',
+    help='periodically report the time waited so far')
+parser.add_argument('command', nargs='+')
+args = parser.parse_args()
+
+start_time = time.time()
+last_reported = start_time
+while subprocess.run(args.command).returncode != 0:
+    now = time.time()
+    if args.timeout and now - start_time > args.timeout:
+        print('Timed out waiting for the command to succeed.', file=sys.stderr)
+        sys.exit(1)
+
+    if args.report and now - last_reported > REPORT_DELAY:
+        print('Waited for %d seconds...' % (now - start_time), file=sys.stderr)
+        last_reported = now
+
+    time.sleep(args.delay)
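+
+# Example, as used by generate-chinook-configuration.sh:
+#   ./scripts/wait-until --timeout=30 --report -- nc -z localhost 9100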
diff --git a/static/configuration.json b/static/configuration.json
new file mode 100644
index 0000000..f31c1ab
--- /dev/null
+++ b/static/configuration.json
+{
+  "version": 1,
+  "connectionSettings": {
+    "connectionPlaceholder": {
+      "variable": "HASURA_DYNAMODB_CONNECTION_URI_PLACEHOLDER"
+    }
+  },
+  "metadata": {
+    "tables": {
+      "Complaint_management_system": {
+        "tableName": "Complaint_management_system",
+        "columns": {
+          "PK": {
+            "name": "PK",
+            "type": {
+              "scalarType": "String"
+            },
+            "nullable": "nullable",
+            "description": null
+          },
+          "SK": {
+            "name": "SK",
+            "type": {
+              "scalarType": "String"
+            },
+            "nullable": "nullable",
+            "description": null
+          },
+          "agentID": {
+            "name": "agentID",
+            "type": {
+              "scalarType": "String"
+            },
+            "nullable": "nullable",
+            "description": null
+          },
+          "comm_date": {
+            "name": "comm_date",
+            "type": {
+              "scalarType": "String"
+            },
+            "nullable": "nullable",
+            "description": null
+          },
+          "complaint_id": {
+            "name": "complaint_id",
+            "type": {
+              "scalarType": "String"
+            },
+            "nullable": "nullable",
+            "description": null
+          },
+          "customer_id": {
+            "name": "customer_id",
+            "type": {
+              "scalarType": "String"
+            },
+            "nullable": "nullable",
+            "description": null
+          },
+          "escalated_to": {
+            "name": "escalated_to",
+            "type": {
+              "scalarType": "String"
+            },
+            "nullable": "nullable",
+            "description": null
+          },
+          "escalation_time": {
+            "name": "escalation_time",
+            "type": {
+              "scalarType": "String"
+            },
+            "nullable": "nullable",
+            "description": null
+          }
+        },
+        "partitionKey": "PK",
+        "sortKey": "SK",
+        "gsi": {
+          "Agents_Comments_GSI": {
+            "partition_key": "agentID",
+            "sort_key": "comm_date",
+            "projection_type": {
+              "projection_type": "ALL",
+              "non_key_attributes": []
+            }
+          },
+          "Customer_Complaint_GSI": {
+            "partition_key": "customer_id",
+            "sort_key": "complaint_id",
+            "projection_type": {
+              "projection_type": "ALL",
+              "non_key_attributes": []
+            }
+          },
+          "Escalations_GSI": {
+            "partition_key": "escalated_to",
+            "sort_key": "escalation_time",
+            "projection_type": {
+              "projection_type": "ALL",
+              "non_key_attributes": []
+            }
+          }
+        },
+        "description": null
+      },
+      "SkiLifts": {
+        "tableName": "SkiLifts",
+        "columns": {
+          "Lift": {
+            "name": "Lift",
+            "type": {
+              "scalarType": "String"
+            },
+            "nullable": "nullable",
+            "description": null
+          },
+          "Metadata": {
+            "name": "Metadata",
+            "type": {
+              "scalarType": "String"
+            },
+            "nullable": "nullable",
+            "description": null
+          },
+          "TotalUniqueLiftRiders": {
+            "name": "TotalUniqueLiftRiders",
+            "type": {
+              "scalarType": "Number"
+            },
+            "nullable": "nullable",
+            "description": null
+          }
+        },
+        "partitionKey": "Lift",
+        "sortKey": "Metadata",
+        "gsi": {
+          "SkiLiftsByRiders": {
+            "partition_key": "Lift",
+            "sort_key": "TotalUniqueLiftRiders",
+            "projection_type": {
+              "projection_type": "INCLUDE",
+              "non_key_attributes": [
+                "Metadata"
+              ]
+            }
+          }
+        },
+        "description": null
+      },
+      "test": {
+        "tableName": "test",
+        "columns": {
+          "1": {
+            "name": "1",
+            "type": {
+              "scalarType": "Number"
+            },
+            "nullable": "nullable",
+            "description": null
+          },
+          "country": {
+            "name": "country",
+            "type": {
+              "scalarType": "String"
+            },
+            "nullable": "nullable",
+            "description": null
+          },
+          "department": {
+            "name": "department",
+            "type": {
+              "scalarType": "String"
+            },
+            "nullable": "nullable",
+            "description": null
+          },
+          "id": {
+            "name": "id",
+            "type": {
+              "scalarType": "Number"
+            },
+            "nullable": "nullable",
+            "description": null
+          },
"name": { + "name": "name", + "type": { + "scalarType": "String" + }, + "nullable": "nullable", + "description": null + } + }, + "partitionKey": "id", + "sortKey": "name", + "gsi": { + "countryindex": { + "partition_key": "country", + "sort_key": null, + "projection_type": { + "projection_type": "ALL", + "non_key_attributes": [] + } + }, + "countryindexwithsortkey": { + "partition_key": "country", + "sort_key": "name", + "projection_type": { + "projection_type": "ALL", + "non_key_attributes": [] + } + }, + "department": { + "partition_key": "1", + "sort_key": null, + "projection_type": { + "projection_type": "ALL", + "non_key_attributes": [] + } + }, + "departmentindex": { + "partition_key": "department", + "sort_key": null, + "projection_type": { + "projection_type": "ALL", + "non_key_attributes": [] + } + }, + "idindex": { + "partition_key": "id", + "sort_key": null, + "projection_type": { + "projection_type": "ALL", + "non_key_attributes": [] + } + }, + "idindexwithsortkey": { + "partition_key": "id", + "sort_key": "country", + "projection_type": { + "projection_type": "ALL", + "non_key_attributes": [] + } + }, + "test_gsi": { + "partition_key": "id", + "sort_key": "country", + "projection_type": { + "projection_type": "INCLUDE", + "non_key_attributes": [ + "name", + "id", + "department" + ] + } + } + }, + "description": null + } + }, + "scalarTypes": { + "Number": { + "typeName": "Number", + "description": null, + "aggregateFunctions": {}, + "comparisonOperators": { + "_eq": { + "operatorName": "=", + "operatorKind": "equal", + "argumentType": "Number", + "isInfix": true + }, + "_gt": { + "operatorName": ">", + "operatorKind": "custom", + "argumentType": "Number", + "isInfix": true + }, + "_gte": { + "operatorName": ">=", + "operatorKind": "custom", + "argumentType": "Number", + "isInfix": true + }, + "_in": { + "operatorName": "IN", + "operatorKind": "in", + "argumentType": "Number", + "isInfix": true + }, + "_lt": { + "operatorName": "<", + "operatorKind": "custom", + "argumentType": "Number", + "isInfix": true + }, + "_lte": { + "operatorName": "<=", + "operatorKind": "custom", + "argumentType": "Number", + "isInfix": true + }, + "_neq": { + "operatorName": "!=", + "operatorKind": "custom", + "argumentType": "Number", + "isInfix": true + } + }, + "typeRepresentation": null + }, + "String": { + "typeName": "String", + "description": null, + "aggregateFunctions": {}, + "comparisonOperators": { + "_eq": { + "operatorName": "=", + "operatorKind": "equal", + "argumentType": "String", + "isInfix": true + }, + "_gt": { + "operatorName": ">", + "operatorKind": "custom", + "argumentType": "String", + "isInfix": true + }, + "_gte": { + "operatorName": ">=", + "operatorKind": "custom", + "argumentType": "String", + "isInfix": true + }, + "_in": { + "operatorName": "IN", + "operatorKind": "in", + "argumentType": "String", + "isInfix": true + }, + "_lt": { + "operatorName": "<", + "operatorKind": "custom", + "argumentType": "String", + "isInfix": true + }, + "_lte": { + "operatorName": "<=", + "operatorKind": "custom", + "argumentType": "String", + "isInfix": true + }, + "_neq": { + "operatorName": "!=", + "operatorKind": "custom", + "argumentType": "String", + "isInfix": true + } + }, + "typeRepresentation": null + } + } + } +} diff --git a/static/temp-deploys/.gitkeep b/static/temp-deploys/.gitkeep new file mode 100644 index 0000000..e69de29