From 56f5c224aab0f5b24c1be1fe8815c39e82d7edca Mon Sep 17 00:00:00 2001
From: pranshi06 <85474619+pranshi06@users.noreply.github.com>
Date: Tue, 3 Dec 2024 15:40:13 +0530
Subject: [PATCH] dynamodb connector - rust sdk (#5)

* wip: dynamodb connector - rust sdk
* add pagination
* fix order by and where clause
* gsi query
* non-key attributes
* env vars
* query result - remove exit code
* wip: packaging code
* wip: code cleanup
* fmt
* cargo clippy
* cargo clippy
* type representation
* fix sets type column result
* cleanup

---------

Co-authored-by: py
---
 .editorconfig                                 | 12 -
 .github/dependabot.yml                        | 12 +
 .github/pull-request-template.md              | 9 +
 .github/workflows/build-and-test.yml          | 27 -
 .github/workflows/cargo-build.yaml            | 44 +
 .github/workflows/cargo-test.yaml             | 89 +
 .github/workflows/check-format.yaml           | 36 +
 .github/workflows/nix-check.yaml              | 51 +
 .github/workflows/ship.yaml                   | 249 +
 .gitignore                                    | 2 +-
 .nvmrc                                        | 1 -
 Cargo.lock                                    | 4272 ++++++++++++
 Cargo.toml                                    | 74 +
 Dockerfile                                    | 22 -
 changelog.md                                  | 24 +
 ci/deploy.sh                                  | 139 +
 crates/cli/Cargo.toml                         | 29 +
 crates/cli/build.rs                           | 20 +
 crates/cli/src/lib.rs                         | 191 +
 crates/cli/src/main.rs                        | 62 +
 crates/cli/src/metadata.rs                    | 78 +
 crates/configuration/Cargo.toml               | 32 +
 crates/configuration/src/configuration.rs     | 27 +
 .../configuration/src/connection_settings.rs  | 39 +
 crates/configuration/src/environment.rs       | 189 +
 crates/configuration/src/error.rs             | 76 +
 crates/configuration/src/lib.rs               | 22 +
 .../src/to_runtime_configuration.rs           | 288 +
 .../src/values/connection_info.rs             | 64 +
 crates/configuration/src/values/mod.rs        | 6 +
 .../configuration/src/values/pool_settings.rs | 56 +
 crates/configuration/src/values/secret.rs     | 26 +
 crates/configuration/src/version1.rs          | 472 ++
 crates/ndc-dynamodb/Cargo.toml                | 39 +
 crates/ndc-dynamodb/bin/main.rs               | 17 +
 crates/ndc-dynamodb/src/capabilities.rs       | 30 +
 crates/ndc-dynamodb/src/connector.rs          | 277 +
 crates/ndc-dynamodb/src/error/convert.rs      | 40 +
 crates/ndc-dynamodb/src/error/mod.rs          | 4 +
 crates/ndc-dynamodb/src/error/record.rs       | 45 +
 crates/ndc-dynamodb/src/health.rs             | 47 +
 crates/ndc-dynamodb/src/lib.rs                | 12 +
 crates/ndc-dynamodb/src/main.rs               | 3 +
 crates/ndc-dynamodb/src/query.rs              | 87 +
 crates/ndc-dynamodb/src/schema.rs             | 178 +
 crates/ndc-dynamodb/src/state.rs              | 60 +
 crates/query-engine/execution/Cargo.toml      | 24 +
 crates/query-engine/execution/src/error.rs    | 46 +
 crates/query-engine/execution/src/lib.rs      | 6 +
 crates/query-engine/execution/src/metrics.rs  | 314 +
 crates/query-engine/execution/src/query.rs    | 129 +
 crates/query-engine/metadata/Cargo.toml       | 14 +
 crates/query-engine/metadata/src/lib.rs       | 3 +
 .../metadata/src/metadata/database.rs         | 286 +
 .../query-engine/metadata/src/metadata/mod.rs | 25 +
 crates/query-engine/sql/Cargo.toml            | 16 +
 crates/query-engine/sql/src/lib.rs            | 4 +
 crates/query-engine/sql/src/sql/ast.rs        | 454 ++
 crates/query-engine/sql/src/sql/convert.rs    | 816 +++
 .../sql/src/sql/execution_plan.rs             | 81 +
 crates/query-engine/sql/src/sql/helpers.rs    | 243 +
 crates/query-engine/sql/src/sql/mod.rs        | 8 +
 .../sql/src/sql/rewrites/constant_folding.rs  | 377 ++
 .../query-engine/sql/src/sql/rewrites/mod.rs  | 2 +
 crates/query-engine/sql/src/sql/string.rs     | 62 +
 crates/query-engine/translation/Cargo.toml    | 25 +
 crates/query-engine/translation/src/lib.rs    | 4 +
 .../translation/src/translation/error.rs      | 251 +
 .../translation/src/translation/helpers.rs    | 321 +
 .../translation/src/translation/mod.rs        | 5 +
 .../src/translation/query/fields.rs           | 186 +
 .../src/translation/query/filtering.rs        | 651 ++
 .../translation/src/translation/query/mod.rs  | 62 +
 .../src/translation/query/operators.rs        | 67 +
 .../translation/src/translation/query/root.rs | 240 +
 .../src/translation/query/sorting.rs          | 421 ++
 .../src/translation/query/values.rs           | 179 +
 docker-compose.yaml                           | 46 +
 flake.lock                                    | 97 +
 flake.nix                                     | 148 +
 jest.config.js                                | 6 -
 justfile                                      | 233 +
 metrics/grafana/dashboard.yaml                | 12 +
 metrics/grafana/dashboards/postgres-ndc.json  | 643 ++
 metrics/grafana/datasource.yml                | 9 +
 metrics/prometheus/prometheus.yml             | 30 +
 nix/app.nix                                   | 56 +
 nix/docker.nix                                | 47 +
 nix/rust.nix                                  | 59 +
 package-lock.json                             | 5852 -----
 package.json                                  | 29 -
 rust-toolchain.toml                           | 4 +
 scripts/generate-chinook-configuration.sh     | 52 +
 scripts/release-notes.py                      | 64 +
 scripts/wait-until                            | 39 +
 src/capabilities.ts                           | 8 -
 src/configuration-schema.generated.json       | 273 -
 src/configuration.ts                          | 88 -
 src/connector.ts                              | 108 -
 src/index.ts                                  | 4 -
 src/query.ts                                  | 490 --
 src/result.ts                                 | 115 -
 src/schema-dynamo.ts                          | 231 -
 src/schema-ndc.ts                             | 486 --
 src/util.ts                                   | 37 -
 static/configuration.json                     | 547 ++
 static/temp-deploys/.gitkeep                  | 0
 test/schema-ndc.test.ts                       | 1104 ----
 test/util.ts                                  | 12 -
 tsconfig.json                                 | 12 -
 110 files changed, 14924 insertions(+), 8918 deletions(-)
 delete mode 100644 .editorconfig
 create mode 100644 .github/dependabot.yml
 create mode 100644 .github/pull-request-template.md
 delete mode 100644 .github/workflows/build-and-test.yml
 create mode 100644 .github/workflows/cargo-build.yaml
 create mode 100644 .github/workflows/cargo-test.yaml
 create mode 100644 .github/workflows/check-format.yaml
 create mode 100644 .github/workflows/nix-check.yaml
 create mode 100644 .github/workflows/ship.yaml
 delete mode 100644 .nvmrc
 create mode 100644 Cargo.lock
 create mode 100644 Cargo.toml
 delete mode 100644 Dockerfile
 create mode 100644 changelog.md
 create mode 100644 ci/deploy.sh
 create mode 100644 crates/cli/Cargo.toml
 create mode 100644 crates/cli/build.rs
 create mode 100644 crates/cli/src/lib.rs
 create mode 100644 crates/cli/src/main.rs
 create mode 100644 crates/cli/src/metadata.rs
 create mode 100644 crates/configuration/Cargo.toml
 create mode 100644 crates/configuration/src/configuration.rs
 create mode 100644 crates/configuration/src/connection_settings.rs
 create mode 100644 crates/configuration/src/environment.rs
 create mode 100644 crates/configuration/src/error.rs
 create mode 100644 crates/configuration/src/lib.rs
 create mode 100644 crates/configuration/src/to_runtime_configuration.rs
 create mode 100644 crates/configuration/src/values/connection_info.rs
 create mode 100644 crates/configuration/src/values/mod.rs
 create mode 100644 crates/configuration/src/values/pool_settings.rs
 create mode 100644 crates/configuration/src/values/secret.rs
 create mode 100644 crates/configuration/src/version1.rs
 create mode 100644 crates/ndc-dynamodb/Cargo.toml
 create mode 100644 crates/ndc-dynamodb/bin/main.rs
 create mode 100644 crates/ndc-dynamodb/src/capabilities.rs
 create mode 100644 crates/ndc-dynamodb/src/connector.rs
 create mode 100644 crates/ndc-dynamodb/src/error/convert.rs
 create mode 100644 crates/ndc-dynamodb/src/error/mod.rs
 create mode 100644 crates/ndc-dynamodb/src/error/record.rs
 create mode 100644 crates/ndc-dynamodb/src/health.rs
 create mode 100644 crates/ndc-dynamodb/src/lib.rs
 create mode 100644 crates/ndc-dynamodb/src/main.rs
 create mode 100644 crates/ndc-dynamodb/src/query.rs
 create mode 100644 crates/ndc-dynamodb/src/schema.rs
 create mode 100644 crates/ndc-dynamodb/src/state.rs
 create mode 100644 crates/query-engine/execution/Cargo.toml
 create mode 100644 crates/query-engine/execution/src/error.rs
 create mode 100644 crates/query-engine/execution/src/lib.rs
 create mode 100644 crates/query-engine/execution/src/metrics.rs
 create mode 100644 crates/query-engine/execution/src/query.rs
 create mode 100644 crates/query-engine/metadata/Cargo.toml
 create mode 100644 crates/query-engine/metadata/src/lib.rs
 create mode 100644 crates/query-engine/metadata/src/metadata/database.rs
 create mode 100644 crates/query-engine/metadata/src/metadata/mod.rs
 create mode 100644 crates/query-engine/sql/Cargo.toml
 create mode 100644 crates/query-engine/sql/src/lib.rs
 create mode 100644 crates/query-engine/sql/src/sql/ast.rs
 create mode 100644 crates/query-engine/sql/src/sql/convert.rs
 create mode 100644 crates/query-engine/sql/src/sql/execution_plan.rs
 create mode 100644 crates/query-engine/sql/src/sql/helpers.rs
 create mode 100644 crates/query-engine/sql/src/sql/mod.rs
 create mode 100644 crates/query-engine/sql/src/sql/rewrites/constant_folding.rs
 create mode 100644 crates/query-engine/sql/src/sql/rewrites/mod.rs
 create mode 100644 crates/query-engine/sql/src/sql/string.rs
 create mode 100644 crates/query-engine/translation/Cargo.toml
 create mode 100644 crates/query-engine/translation/src/lib.rs
 create mode 100644 crates/query-engine/translation/src/translation/error.rs
 create mode 100644 crates/query-engine/translation/src/translation/helpers.rs
 create mode 100644 crates/query-engine/translation/src/translation/mod.rs
 create mode 100644 crates/query-engine/translation/src/translation/query/fields.rs
 create mode 100644 crates/query-engine/translation/src/translation/query/filtering.rs
 create mode 100644 crates/query-engine/translation/src/translation/query/mod.rs
 create mode 100644 crates/query-engine/translation/src/translation/query/operators.rs
 create mode 100644 crates/query-engine/translation/src/translation/query/root.rs
 create mode 100644 crates/query-engine/translation/src/translation/query/sorting.rs
 create mode 100644 crates/query-engine/translation/src/translation/query/values.rs
 create mode 100644 docker-compose.yaml
 create mode 100644 flake.lock
 create mode 100644 flake.nix
 delete mode 100644 jest.config.js
 create mode 100644 justfile
 create mode 100644 metrics/grafana/dashboard.yaml
 create mode 100644 metrics/grafana/dashboards/postgres-ndc.json
 create mode 100644 metrics/grafana/datasource.yml
 create mode 100644 metrics/prometheus/prometheus.yml
 create mode 100644 nix/app.nix
 create mode 100644 nix/docker.nix
 create mode 100644 nix/rust.nix
 delete mode 100644 package-lock.json
 delete mode 100644 package.json
 create mode 100644 rust-toolchain.toml
 create mode 100755 scripts/generate-chinook-configuration.sh
 create mode 100755 scripts/release-notes.py
 create mode 100755 scripts/wait-until
 delete mode 100644 src/capabilities.ts
 delete mode 100644 src/configuration-schema.generated.json
 delete mode 100644 src/configuration.ts
 delete mode 100644 src/connector.ts
 delete mode 100644 src/index.ts
 delete mode 100644 src/query.ts
 delete mode 100644 src/result.ts
 delete mode 100644 src/schema-dynamo.ts
 delete mode 100644 src/schema-ndc.ts
 delete mode 100644 src/util.ts
 create mode 100644 static/configuration.json
 create mode 100644 static/temp-deploys/.gitkeep
 delete mode 100644 test/schema-ndc.test.ts
 delete mode 100644 test/util.ts
 delete mode 100644 tsconfig.json

diff --git a/.editorconfig b/.editorconfig
deleted file mode 100644
index 5d47c21..0000000
--- a/.editorconfig
+++ /dev/null
@@ -1,12 +0,0 @@
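The commit notes above ("add pagination", "gsi query", "fix order by and where clause") map onto the Query API of the aws-sdk-dynamodb crate that this patch adopts. As a rough sketch, not code from this patch, and with the table ("Music"), index ("ArtistIndex"), and key attribute ("Artist") made up for illustration, a paginated, descending-order query against a global secondary index looks something like:

```rust
// Illustrative sketch only, not part of this patch. Table, index, and
// attribute names are hypothetical.
use std::collections::HashMap;

use aws_sdk_dynamodb::types::AttributeValue;
use aws_sdk_dynamodb::Client;

async fn query_all_pages(
    client: &Client,
) -> Result<Vec<HashMap<String, AttributeValue>>, aws_sdk_dynamodb::Error> {
    let mut items = Vec::new();
    let mut start_key: Option<HashMap<String, AttributeValue>> = None;

    loop {
        let resp = client
            .query()
            .table_name("Music")
            .index_name("ArtistIndex") // query a GSI instead of the base table
            .key_condition_expression("#artist = :artist")
            .expression_attribute_names("#artist", "Artist")
            .expression_attribute_values(":artist", AttributeValue::S("Foo".into()))
            .scan_index_forward(false) // ORDER BY ... DESC on the range key
            .set_exclusive_start_key(start_key)
            .send()
            .await?;

        items.extend(resp.items().iter().cloned());

        // A single Query call returns at most 1 MB of data; LastEvaluatedKey
        // signals that another page must be fetched with ExclusiveStartKey set.
        match resp.last_evaluated_key() {
            Some(key) => start_key = Some(key.clone()),
            None => return Ok(items),
        }
    }
}
```

One related detail behind the "non-key attributes" note: a GSI query can only return attributes the index projects, so a connector translating field selections has to account for each index's projection.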
-# EditorConfig is awesome: https://EditorConfig.org
-
-# top-most EditorConfig file
-root = true
-
-[*]
-indent_style = space
-indent_size = 2
-end_of_line = lf
-charset = utf-8
-trim_trailing_whitespace = true
-insert_final_newline = true
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..973ac41
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,12 @@
+version: 2
+
+updates:
+  - package-ecosystem: "cargo"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
diff --git a/.github/pull-request-template.md b/.github/pull-request-template.md
new file mode 100644
index 0000000..6bc927e
--- /dev/null
+++ b/.github/pull-request-template.md
@@ -0,0 +1,9 @@
+
+
+### What
+
+
+
+### How
+
+
diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml
deleted file mode 100644
index a2ac63d..0000000
--- a/.github/workflows/build-and-test.yml
+++ /dev/null
@@ -1,27 +0,0 @@
-# This workflow will do a clean installation of node dependencies, cache/restore them, build the source code and run tests across different versions of node
-# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-nodejs
-
-name: Build and Test
-
-on:
-  push:
-    branches: [ "main" ]
-  pull_request:
-    branches: [ "main" ]
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Setup Node.js
-        uses: actions/setup-node@v3
-        with:
-          node-version-file: .nvmrc
-          cache: 'npm'
-
-      - run: npm ci
-      - run: npm run typecheck
-      - run: npm test
diff --git a/.github/workflows/cargo-build.yaml b/.github/workflows/cargo-build.yaml
new file mode 100644
index 0000000..2d94855
--- /dev/null
+++ b/.github/workflows/cargo-build.yaml
@@ -0,0 +1,44 @@
+on: [push]
+
+name: DynamoDB NDC build
+
+jobs:
+  build:
+    name: Build DynamoDB NDC
+    runs-on: ubuntu-latest
+    env:
+      CARGO_NET_GIT_FETCH_WITH_CLI: "true"
+      RUSTFLAGS: "-D warnings" # fail on warnings
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: install protoc
+        uses: arduino/setup-protoc@v2
+        with:
+          version: "23.3"
+
+      - name: install tools
+        run: |
+          rustup show
+
+      - uses: Swatinem/rust-cache@v2
+
+      - name: build crates
+        run: |
+          cargo build --locked --release --all-features
+
+      - name: lint
+        run: |
+          cargo clippy --release -- --deny=clippy::all
+
+      # scream into Slack if something goes wrong
+      - name: report status
+        if: github.ref == 'refs/heads/main'
+        uses: ravsamhq/notify-slack-action@v2
+        with:
+          status: ${{ job.status }}
+          notify_when: failure
+          notification_title: "😧 Error on <{repo_url}|{repo}>"
+          message_format: "🐴 *{workflow}* {status_message} for <{repo_url}|{repo}>"
+        env:
+          SLACK_WEBHOOK_URL: ${{ secrets.BROKEN_BUILD_SLACK_WEBHOOK_URL }}
diff --git a/.github/workflows/cargo-test.yaml b/.github/workflows/cargo-test.yaml
new file mode 100644
index 0000000..ec65512
--- /dev/null
+++ b/.github/workflows/cargo-test.yaml
@@ -0,0 +1,89 @@
+# on: [push]
+
+# name: DynamoDB NDC tests
+
+# jobs:
+#   test-query-engine:
+#     name: Test query-engine
+#     runs-on: ubuntu-latest
+#     env:
+#       CARGO_NET_GIT_FETCH_WITH_CLI: "true"
+#       RUSTFLAGS: "-D warnings" # fail on warnings
+#     steps:
+#       - uses: actions/checkout@v4
+
+#       - name: install tools
+#         run: |
+#           rustup show
+
+#       - uses: Swatinem/rust-cache@v2
+
+#       - name: run tests
+#         run: |
+#           cargo test --release -p query-engine-translation
+#         env:
+#           RUST_LOG: INFO
+
+#       # scream into Slack if something
goes wrong +# - name: report status +# if: github.ref == 'refs/heads/main' +# uses: ravsamhq/notify-slack-action@v2 +# with: +# status: ${{ job.status }} +# notify_when: failure +# notification_title: "😧 Error on <{repo_url}|{repo}>" +# message_format: "🐴 *{workflow}* {status_message} for <{repo_url}|{repo}>" +# env: +# SLACK_WEBHOOK_URL: ${{ secrets.BROKEN_BUILD_SLACK_WEBHOOK_URL }} + +# test-dynamodb-ndc: +# name: Test ndc-dynamodb +# runs-on: ubuntu-latest +# env: +# CARGO_NET_GIT_FETCH_WITH_CLI: "true" +# RUSTFLAGS: "-D warnings" # fail on warnings + +# steps: +# - uses: actions/checkout@v4 + +# - name: install protoc +# uses: arduino/setup-protoc@v2 +# with: +# version: "23.3" + +# - name: install tools +# run: | +# rustup show + +# - uses: Swatinem/rust-cache@v2 +# with: +# shared-key: "build" # share the cache across jobs + +# # - uses: hoverkraft-tech/compose-action@v1.5.1 +# # with: +# # compose-file: "./docker-compose.yaml" +# # up-flags: "--pull=always --wait" +# # down-flags: "--volumes" +# # services: | +# # dynamodb + +# - name: run tests +# run: | +# cargo test --release --workspace +# env: +# HASURA_BIGQUERY_SERVICE_KEY: "{\"type\": \"service_account\",\"project_id\": \"hasura-development\",\"private_key_id\": \"222dd3f9e98b6743bb8d74d7a126fe89e6ac221d\",\"private_key\": \"-----BEGIN PRIVATE KEY-----\\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDZuxyxWk6bOxHr\\nht+MPZ7Q+F4D7AzTqCTZOmcldod+KHMlUCKwIOLQabAO8TEPhvcYyzBQ4gCwoN2i\\n7VoQbHmlTpQu1s43K25oIoEIicFTbHcL4MALiFnT44XYl+PxL+e//GibJYGjwqI+\\n3o2Go//o8BOfQEO/PPiQdub8/4VQjXGE0+xLeUjYURiJPu8ojrL2FdIqvzMaDWKq\\n2r6KuukIeWkqmt6fHnrGiseavg3g7pBPjqmRtX6ekY74XbkTQk1kmCKf9MLjZ1UI\\n+8QNp1C4pO4eDbp1Zkqz3uHhzccUvStkSCmppjKfD64Tp+6ExbUnMfq1UJ0GJBDM\\nVPeJF6+PAgMBAAECggEAFFSf88GKeH02CaeQ5S/1ze57HOOkOPlI443MBtgAA9w0\\nEEZgztBrTWmo+mQ0IA6KsSJ78vl/df63Y1jFYaY3X6OsO4lsPQONriSWptzyE9+b\\naB0G4azMMnhazaQ1MRa3jZo8jEwexFNOwg8W6P0UTsRoGKUwDkHbteWcYQBdCu3W\\nFa/CX3Tw0n/DdAVNi8Ai9K0d+Okmcv+ZRopeNuLENR28/VGSXj+Li1V7A0s+nX9E\\nyxuGrDY4WMxSXHkW2yjrDnPUs6dXLFk1HBQPaHrs3i6gGyNXfTNWUJ3nGQwZIqJI\\na1b4TMiGVapq33qCo/3Yi6jQ+I6KnpmWgQ7y5LXhoQKBgQDuA80oWCXQv7MERg91\\nFwammtXrMjoD234u3RGNtnU67yH87kvL+p18EiNlbmy+CWyoc1mOjLtTHvMBfMGh\\nfKt3BSuzrZZArA1GJF6J2Rew5dkJGzwPogLSnXMgrVwknAejKJw97wTJzzIZuuSc\\nb7P57+mFoSdR+eSb44WFcuMyoQKBgQDqLu9LWz+LcljDWDeMQ4kl8gkNZMe5//Qd\\nOpa6mN6T2nfRgxasaLo7WO8TqT4X28eBJKuru4BOeHVx0Y8GvWYyaW0uEEycdXVl\\n6man+YUhZezTjjB/nCeaz7E7LCcUao1JP2Y9xlnpO5jdyi2tYkCqu7vOxmnLArN/\\nl3zuXgrkLwKBgEzCzReF1ixMpt9p+PI6StrQdM01laBI2ZkjktWxUn1/Qebgs3FF\\nkiTBdMjxpABl6bUp/mgK2x8jjBuesJP0MRhhgoagJSUWV/GXKSYr7YgPmL9nGSex\\niFeEj+yp/F2SNKRaJImU3GZ5fB7wN2p8W/7vcNC3+IZnoWLlLdqsAroBAoGAdzZh\\nVoki9gfFq9uym1Kd9JUbipftHIBxcpeqt16un7GtIRiMaEP/2cpSGj4jf92/17wl\\nMA0JKekkUEaPeqzb43nLvJFLjrI0iyciDwx0eyX5w1A03CFP//0OicLWOgxr1AfU\\nMkpQ5uwRy4XqbsL/jGp5Fq/mlxPO8HrbfDSfcr0CgYEAxN/RMCYODz+p9xZ6tbiS\\nfHFrCgvPpYR9hEWhb/DyT4Q/OSzk0TItuSXGc3uicYeIycHIndyWej/a1HGg0IRK\\nqjGbqGvRJIrzhLvLog1oOGADFSE2IJrxV2m9lQG8IUow4QUFcoZaCXZAQEvWeo+D\\nq+4Pe2w4aMZeyqpt/mOSGzQ=\\n-----END PRIVATE KEY-----\\n\",\"client_email\": \"skm-bq-test@hasura-development.iam.gserviceaccount.com\",\"client_id\": \"116460406056940511807\",\"auth_uri\": \"https://accounts.google.com/o/oauth2/auth\",\"token_uri\": \"https://oauth2.googleapis.com/token\",\"auth_provider_x509_cert_url\": \"https://www.googleapis.com/oauth2/v1/certs\",\"client_x509_cert_url\": \"https://www.googleapis.com/robot/v1/metadata/x509/skm-bq-test%40hasura-development.iam.gserviceaccount.com\",\"universe_domain\": 
\"googleapis.com\"}" +# HASURA_BIGQUERY_PROJECT_ID: "hasura-development" +# HASURA_BIGQUERY_DATASET_ID: "chinook_sample" +# RUST_LOG: INFO + +# # scream into Slack if something goes wrong +# - name: Report Status +# if: github.ref == 'refs/heads/main' +# uses: ravsamhq/notify-slack-action@v2 +# with: +# status: ${{ job.status }} +# notify_when: failure +# notification_title: "😧 Error on <{repo_url}|{repo}>" +# message_format: "🐴 *{workflow}* {status_message} for <{repo_url}|{repo}>" +# env: +# SLACK_WEBHOOK_URL: ${{ secrets.BROKEN_BUILD_SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/check-format.yaml b/.github/workflows/check-format.yaml new file mode 100644 index 0000000..539c296 --- /dev/null +++ b/.github/workflows/check-format.yaml @@ -0,0 +1,36 @@ +name: DynamoDB NDC format + +on: + push: + +jobs: + cargo-fmt: + name: check formatting with cargo fmt + runs-on: ubuntu-latest + env: + CARGO_NET_GIT_FETCH_WITH_CLI: "true" + RUSTFLAGS: "-D warnings" # fail on warnings + steps: + - uses: actions/checkout@v4 + + - name: install tools + run: | + rustup show + + - uses: Swatinem/rust-cache@v2 + + - name: check formatting + run: | + cargo fmt --all --check + + # scream into Slack if something goes wrong + - name: Report Status + if: github.ref == 'refs/heads/main' + uses: ravsamhq/notify-slack-action@v2 + with: + status: ${{ job.status }} + notify_when: failure + notification_title: "😧 Error on <{repo_url}|{repo}>" + message_format: "🐴 *{workflow}* {status_message} for <{repo_url}|{repo}>" + env: + SLACK_WEBHOOK_URL: ${{ secrets.BROKEN_BUILD_SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/nix-check.yaml b/.github/workflows/nix-check.yaml new file mode 100644 index 0000000..848ed51 --- /dev/null +++ b/.github/workflows/nix-check.yaml @@ -0,0 +1,51 @@ +name: DynamoDB NDC nix check + +on: + push: + +jobs: + nix-flake-check: + name: nix flake check + runs-on: ubuntu-latest + steps: + - name: Checkout 🛎️ + uses: actions/checkout@v4 + + - name: Install Nix ❄ + uses: cachix/install-nix-action@V28 + with: + github_access_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up the Nix Cache 🔌 + uses: cachix/cachix-action@v15 + with: + name: hasura-v3-dev + authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} + useDaemon: false # attempt to stop hanging on cleanup + + - name: nix flake check + run: | + nix flake check --print-build-logs + + nix-develop: + name: nix develop + runs-on: ubuntu-latest + steps: + - name: Checkout 🛎️ + uses: actions/checkout@v4 + + - name: Install Nix ❄ + uses: cachix/install-nix-action@V28 + with: + github_access_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up the Nix Cache 🔌 + uses: cachix/cachix-action@v15 + with: + name: hasura-v3-dev + authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} + useDaemon: false # attempt to stop hanging on cleanup + + - name: nix develop --command true + run: | + nix develop --print-build-logs --command true diff --git a/.github/workflows/ship.yaml b/.github/workflows/ship.yaml new file mode 100644 index 0000000..f72856f --- /dev/null +++ b/.github/workflows/ship.yaml @@ -0,0 +1,249 @@ +name: ship + +on: + push: + +jobs: + nix-build: + name: nix build + runs-on: ubuntu-latest + strategy: + matrix: + target: + - x86_64-linux + - aarch64-linux + steps: + - name: Checkout 🛎️ + uses: actions/checkout@v4 + + - name: Install Nix ❄ + uses: cachix/install-nix-action@V28 + with: + github_access_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up the Nix Cache 🔌 + uses: cachix/cachix-action@v15 + with: + name: hasura-v3-dev + authToken: ${{ 
secrets.CACHIX_AUTH_TOKEN }} + useDaemon: false # attempt to stop hanging on cleanup + + - name: Build a binary with Nix + run: nix build --print-build-logs '.#${{ matrix.target }}' + + - name: Build a Docker image with Nix + run: nix build --print-build-logs '.#docker-${{ matrix.target }}' + + # scream into Slack if something goes wrong + - name: Report Status + if: always() && github.ref == 'refs/heads/main' + uses: ravsamhq/notify-slack-action@v2 + with: + status: ${{ job.status }} + notify_when: failure + notification_title: "😧 Error on <{repo_url}|{repo}>" + message_format: "🐴 *{workflow}* {status_message} for <{repo_url}|{repo}>" + env: + SLACK_WEBHOOK_URL: ${{ secrets.BROKEN_BUILD_SLACK_WEBHOOK_URL }} + + push-docker-images: + name: push Docker images + needs: + - nix-build + runs-on: ubuntu-latest + # Only run on the `main` branch or version tags. + # Note we currently tag the image with 'latest', so will want to stop doing + # so if we run this on PR branches, etc. + if: (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v')) + permissions: + contents: read + id-token: write + packages: write + steps: + - name: Checkout 🛎️ + uses: actions/checkout@v4 + + - name: Install Nix ❄ + uses: cachix/install-nix-action@V28 + with: + github_access_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up the Nix Cache 🔌 + uses: cachix/cachix-action@v15 + with: + name: hasura-v3-dev + authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} + useDaemon: false # attempt to stop hanging on cleanup + + - id: gcloud-auth + name: Authenticate to Google Cloud 🔑 + uses: google-github-actions/auth@v2 + with: + token_format: access_token + service_account: "hasura-ci-docker-writer@hasura-ddn.iam.gserviceaccount.com" + workload_identity_provider: "projects/1025009031284/locations/global/workloadIdentityPools/hasura-ddn/providers/github" + + - name: Login to Google Container Registry 📦 + uses: "docker/login-action@v3" + with: + registry: "us-docker.pkg.dev" + username: "oauth2accesstoken" + password: "${{ steps.gcloud-auth.outputs.access_token }}" + + - name: Login to GitHub Container Registry 📦 + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Push Docker images to Google Container Registry 🚢 + run: nix run .#publish-docker-image '${{ github.ref }}' 'us-docker.pkg.dev/hasura-ddn/ddn/ndc-dynamodb' + + - name: Push Docker images to GitHub Packages 🚢 + run: nix run .#publish-docker-image '${{ github.ref }}' 'ghcr.io/hasura/ndc-dynamodb' + + # scream into Slack if something goes wrong + - name: Report Status + if: always() + uses: ravsamhq/notify-slack-action@v2 + with: + status: ${{ job.status }} + notify_when: failure + notification_title: "😧 Error on <{repo_url}|{repo}>" + message_format: "🐴 *{workflow}* {status_message} for <{repo_url}|{repo}>" + env: + SLACK_WEBHOOK_URL: ${{ secrets.BROKEN_BUILD_SLACK_WEBHOOK_URL }} + + build-cli-binaries: + name: build the CLI binaries + strategy: + matrix: + include: + - runner: ubuntu-20.04 + target: x86_64-unknown-linux-gnu + - runner: ubuntu-20.04 + target: aarch64-unknown-linux-gnu + linux-packages: gcc-aarch64-linux-gnu + linker: /usr/bin/aarch64-linux-gnu-gcc + - runner: macos-latest + target: x86_64-apple-darwin + - runner: macos-latest + target: aarch64-apple-darwin + - runner: windows-latest + target: x86_64-pc-windows-msvc + extension: .exe + extra-rust-flags: "-C target-feature=+crt-static" + runs-on: ${{ matrix.runner }} + env: + CARGO_BUILD_TARGET: ${{ 
matrix.target }} + CARGO_NET_GIT_FETCH_WITH_CLI: "true" + RUSTFLAGS: "-D warnings" # fail on warnings + defaults: + run: + shell: bash + steps: + - uses: actions/checkout@v4 + + - name: install protoc + uses: arduino/setup-protoc@v3 + with: + version: "25.x" + repo-token: ${{ secrets.GITHUB_TOKEN }} + + - name: install tools + run: | + rustup show + rustup target add ${{ matrix.target }} + + - name: install other packages required + if: matrix.linux-packages + run: | + sudo apt-get update + sudo apt-get install -y ${{ matrix.linux-packages }} + + - uses: Swatinem/rust-cache@v2 + with: + shared-key: "build-${matrix.runner}" # share the cache across jobs + + - name: build the CLI + run: | + # If we're on a tag, use the tag name as the release version. + if [[ "$GITHUB_REF_TYPE" == 'tag' ]]; then + # Ensure that the version specified in Cargo.toml is the same as the tag (with a 'v' prefix). + CARGO_VERSION="$(cargo metadata --format-version=1 | jq -r '.packages | .[] | select(.name == "ndc-dynamodb") | .version')" + echo "Git tag: ${GITHUB_REF_NAME}" + echo "Cargo version: ${CARGO_VERSION}" + + if [[ "$GITHUB_REF_NAME" != "v${CARGO_VERSION}" ]]; then + echo >&2 "The Git tag is \"${GITHUB_REF_NAME}\", but the version in Cargo.toml is \"${CARGO_VERSION}\"." + echo >&2 'These must be the same, with a "v" prefix for the tag. Aborting.' + exit 1 + fi + export RELEASE_VERSION="$GITHUB_REF_NAME" + echo "RELEASE_VERSION = ${RELEASE_VERSION}" + fi + + if [[ -n '${{ matrix.linker }}' ]]; then + TARGET_SCREAMING="$(echo '${{ matrix.target }}' | tr '[:lower:]' '[:upper:]' | tr '-' '_')" + echo "CARGO_TARGET_${TARGET_SCREAMING}_LINKER"='${{ matrix.linker }}' + declare "CARGO_TARGET_${TARGET_SCREAMING}_LINKER"='${{ matrix.linker }}' + export "CARGO_TARGET_${TARGET_SCREAMING}_LINKER" + fi + + if [[ -n '${{ matrix.extra-rust-flags }}' ]]; then + RUSTFLAGS="${RUSTFLAGS} ${{ matrix.extra-rust-flags }}" + export RUSTFLAGS + fi + echo "RUSTFLAGS = ${RUSTFLAGS}" + + echo "Building for target: ${CARGO_BUILD_TARGET}" + cargo build --release --package=ndc-dynamodb-cli + + mkdir -p release + mv -v target/${{ matrix.target }}/release/ndc-dynamodb-cli release/ndc-dynamodb-cli-${{ matrix.target }}${{ matrix.extension }} + + - uses: actions/upload-artifact@v4 + with: + name: ndc-dynamodb-cli-${{ matrix.target }} + path: release + if-no-files-found: error + + release: + name: release to GitHub + needs: + - push-docker-images # not strictly necessary, but if this fails, we should abort + - build-cli-binaries + runs-on: ubuntu-latest + # We release when a tag is pushed. + if: startsWith(github.ref, 'refs/tags/v') + steps: + - uses: actions/checkout@v4 + + - uses: actions/download-artifact@v4 + with: + path: release/artifacts + merge-multiple: true + + - name: generate SHA-256 checksums + run: | + cd release/artifacts + sha256sum * > ./sha256sum + + - name: generate a changelog + run: | + ./scripts/release-notes.py "${GITHUB_REF_NAME}" >> release/notes.md + + - name: generate a connector package + run: | + chmod +x ./release/artifacts/ndc-dynamodb-cli-* + mkdir release/package + ./release/artifacts/ndc-dynamodb-cli-x86_64-unknown-linux-gnu --context=release/package initialize --with-metadata + tar vczf release/artifacts/package.tar.gz -C release/package . 
+ + - name: create a draft release + uses: ncipollo/release-action@v1 + with: + draft: true + bodyFile: release/notes.md + artifacts: release/artifacts/* diff --git a/.gitignore b/.gitignore index 40b878d..ea8c4bf 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1 @@ -node_modules/ \ No newline at end of file +/target diff --git a/.nvmrc b/.nvmrc deleted file mode 100644 index 6aab9b4..0000000 --- a/.nvmrc +++ /dev/null @@ -1 +0,0 @@ -v18.18.0 diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..9415e12 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,4272 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "addr2line" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" + +[[package]] +name = "ahash" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +dependencies = [ + "cfg-if", + "getrandom", + "once_cell", + "serde", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "allocator-api2" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "0.6.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23a1e53f0f5d86382dafe1cf314783b2044280f406e7e1506368220ad11b1338" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8365de52b16c035ff4fcafe0092ba9390540e3e352870ac09933bebcaa2c8c56" + +[[package]] +name = "anstyle-parse" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" 
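Among the lock entries that follow, aws-config and aws-sdk-dynamodb replace the deleted npm dependency tree, and the "env vars" commit note above indicates that connection settings come from the environment. A minimal sketch of building the client that way, not taken from the patch: the DYNAMODB_ENDPOINT_URL override is a made-up variable name (e.g. for pointing at dynamodb-local), while the standard AWS_REGION / AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY variables are read by aws-config itself.

```rust
// Illustrative sketch only, not part of this patch. aws-config resolves
// credentials and region from the standard AWS environment variables;
// DYNAMODB_ENDPOINT_URL is a hypothetical local-development override.
use aws_config::BehaviorVersion;
use aws_sdk_dynamodb::Client;

async fn client_from_env() -> Client {
    let mut loader = aws_config::defaults(BehaviorVersion::latest());

    // Hypothetical override so local development can target dynamodb-local.
    if let Ok(endpoint) = std::env::var("DYNAMODB_ENDPOINT_URL") {
        loader = loader.endpoint_url(endpoint);
    }

    let shared_config = loader.load().await;
    Client::new(&shared_config)
}
```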
+dependencies = [ + "anstyle", + "windows-sys 0.59.0", +] + +[[package]] +name = "anyhow" +version = "1.0.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8" + +[[package]] +name = "async-stream" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "async-trait" +version = "0.1.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "atoi" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" +dependencies = [ + "num-traits", +] + +[[package]] +name = "autocfg" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" + +[[package]] +name = "aws-config" +version = "1.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d6448cfb224dd6a9b9ac734f58622dd0d4751f3589f3b777345745f46b2eb14" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-sdk-sso", + "aws-sdk-ssooidc", + "aws-sdk-sts", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand", + "hex", + "http 0.2.12", + "ring", + "time", + "tokio", + "tracing", + "url", + "zeroize", +] + +[[package]] +name = "aws-credential-types" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60e8f6b615cb5fc60a98132268508ad104310f0cfb25a1c22eee76efdf9154da" +dependencies = [ + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "zeroize", +] + +[[package]] +name = "aws-runtime" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a10d5c055aa540164d9561a0e2e74ad30f0dcf7393c3a92f6733ddf9c5762468" +dependencies = [ + "aws-credential-types", + "aws-sigv4", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand", + "http 0.2.12", + "http-body 0.4.6", + "once_cell", + "percent-encoding", + "pin-project-lite", + "tracing", + "uuid", +] + +[[package]] +name = "aws-sdk-dynamodb" +version = "1.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e896ff0a1711722e7411ec92bad66613506199c4c9ac810140b975bc8da5c27" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-sso" +version = "1.47.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8776850becacbd3a82a4737a9375ddb5c6832a51379f24443a98e61513f852c" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-ssooidc" +version = "1.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0007b5b8004547133319b6c4e87193eee2a0bcb3e4c18c75d09febe9dab7b383" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-sts" +version = "1.47.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fffaa356e7f1c725908b75136d53207fa714e348f365671df14e95a60530ad3" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-query", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sigv4" +version = "1.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5619742a0d8f253be760bfbb8e8e8368c69e3587e4637af5754e488a611499b1" +dependencies = [ + "aws-credential-types", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "form_urlencoded", + "hex", + "hmac", + "http 0.2.12", + "http 1.1.0", + "once_cell", + "percent-encoding", + "sha2", + "time", + "tracing", +] + +[[package]] +name = "aws-smithy-async" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62220bc6e97f946ddd51b5f1361f78996e704677afc518a4ff66b7a72ea1378c" +dependencies = [ + "futures-util", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "aws-smithy-http" +version = "0.60.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c8bc3e8fdc6b8d07d976e301c02fe553f72a39b7a9fea820e023268467d7ab6" +dependencies = [ + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "bytes-utils", + "futures-core", + "http 0.2.12", + "http-body 0.4.6", + "once_cell", + "percent-encoding", + "pin-project-lite", + "pin-utils", + "tracing", +] + +[[package]] +name = "aws-smithy-json" +version = "0.60.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4683df9469ef09468dad3473d129960119a0d3593617542b7d52086c8486f2d6" +dependencies = [ + "aws-smithy-types", +] + +[[package]] +name = "aws-smithy-query" +version = "0.60.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2fbd61ceb3fe8a1cb7352e42689cec5335833cd9f94103a61e98f9bb61c64bb" +dependencies = [ + "aws-smithy-types", + "urlencoding", +] + +[[package]] +name = "aws-smithy-runtime" +version = "1.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be28bd063fa91fd871d131fc8b68d7cd4c5fa0869bea68daca50dcb1cbd76be2" +dependencies = [ + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "fastrand", + "h2", + "http 0.2.12", + "http-body 0.4.6", + 
"http-body 1.0.1", + "httparse", + "hyper", + "hyper-rustls", + "once_cell", + "pin-project-lite", + "pin-utils", + "rustls 0.21.12", + "tokio", + "tracing", +] + +[[package]] +name = "aws-smithy-runtime-api" +version = "1.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e086682a53d3aa241192aa110fa8dfce98f2f5ac2ead0de84d41582c7e8fdb96" +dependencies = [ + "aws-smithy-async", + "aws-smithy-types", + "bytes", + "http 0.2.12", + "http 1.1.0", + "pin-project-lite", + "tokio", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-smithy-types" +version = "1.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07c9cdc179e6afbf5d391ab08c85eac817b51c87e1892a5edb5f7bbdc64314b4" +dependencies = [ + "base64-simd", + "bytes", + "bytes-utils", + "futures-core", + "http 0.2.12", + "http 1.1.0", + "http-body 0.4.6", + "http-body 1.0.1", + "http-body-util", + "itoa", + "num-integer", + "pin-project-lite", + "pin-utils", + "ryu", + "serde", + "time", + "tokio", + "tokio-util", +] + +[[package]] +name = "aws-smithy-xml" +version = "0.60.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab0b0166827aa700d3dc519f72f8b3a91c35d0b8d042dc5d643a91e6f80648fc" +dependencies = [ + "xmlparser", +] + +[[package]] +name = "aws-types" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5221b91b3e441e6675310829fd8984801b772cb1546ef6c0e54dec9f1ac13fef" +dependencies = [ + "aws-credential-types", + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "rustc_version", + "tracing", +] + +[[package]] +name = "axum" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" +dependencies = [ + "async-trait", + "axum-core", + "bitflags 1.3.2", + "bytes", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "hyper", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "mime", + "rustversion", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-extra" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ab90e7b70bea63a153137162affb6a0bce26b584c24a4c7885509783e2cf30b" +dependencies = [ + "axum", + "axum-core", + "bytes", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "mime", + "pin-project-lite", + "serde", + "tokio", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "backtrace" +version = "0.3.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-targets 0.52.6", +] + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + 
+[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64-simd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "339abbe78e73178762e23bea9dfd08e697eb3f3301cd4be981c0f78ba5859195" +dependencies = [ + "outref", + "vsimd", +] + +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +dependencies = [ + "serde", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "build-data" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eda20fcece9c23f3c3f4c2751a8a5ca9491c05fa7a69920af65953c3b39b7ce4" +dependencies = [ + "chrono", + "safe-regex", +] + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "bytecount" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" + +[[package]] +name = "bytes-utils" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dafe3a8757b027e2be6e4e5601ed563c55989fcf1546e933c66c8eb3a058d35" +dependencies = [ + "bytes", + "either", +] + +[[package]] +name = "cc" +version = "1.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2e7962b54006dcfcc61cb72735f4d89bb97061dd6a7ed882ec6b8ee53714c6f" +dependencies = [ + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.38" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "num-traits", + "serde", + "windows-targets 0.52.6", +] + +[[package]] +name = "clap" +version = "4.5.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97f376d85a664d5837dbae44bf546e6477a679ff6610010f17276f686d867e8" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19bc80abd44e4bed93ca373a0704ccbd1b710dc5749406201bb018272808dc54" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "clap_lex" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" + +[[package]] +name = "colorchoice" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" + +[[package]] +name = "colorful" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97af0562545a7d7f3d9222fcf909963bec36dcb502afaacab98c6ffac8da47ce" + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "console" +version = "0.15.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" +dependencies = [ + "encode_unicode", + "lazy_static", + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "cpufeatures" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" +dependencies = [ + "libc", +] + +[[package]] +name = "crc" +version = "3.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + +[[package]] +name = "crc32fast" 
+version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-queue" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "darling" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +dependencies = [ + "darling_core", + "quote", + "syn", +] + +[[package]] +name = "der" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", + "serde", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "const-oid", + "crypto-common", + "subtle", +] + +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + +[[package]] +name = "dyn-clone" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" + +[[package]] +name = "either" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +dependencies = [ + "serde", +] + +[[package]] +name = "encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "etcetera" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" +dependencies = [ + "cfg-if", + "home", + "windows-sys 0.48.0", +] + +[[package]] +name = "event-listener" +version = "5.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "fancy-regex" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b95f7c0680e4142284cf8b22c14a476e87d61b004a3a0861872b32ef7ead40a2" +dependencies = [ + "bit-set", + "regex", +] + +[[package]] +name = "fastrand" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" + +[[package]] +name = "flate2" +version = "1.0.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "flume" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095" +dependencies = [ + "futures-core", + "futures-sink", + "spin", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fraction" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3027ae1df8d41b4bed2241c8fdad4acc1e7af60c8e17743534b545e77182d678" +dependencies = [ + "lazy_static", + "num", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-intrusive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi", + "wasm-bindgen", +] + +[[package]] +name = "gimli" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" + +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "h2" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap 2.6.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +dependencies = [ + "ahash", + "allocator-api2", +] + +[[package]] +name = "hashbrown" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" + +[[package]] +name = "hashlink" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" +dependencies = [ + "hashbrown 0.14.5", +] + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hkdf" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "home" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.1.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "pin-project-lite", +] + +[[package]] +name = "http-range-header" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"add0ab9360ddbd88cfeb3bd9574a1d85cfdfa14db10b3e21d3700dbc4328758f" + +[[package]] +name = "httparse" +version = "1.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "0.14.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c08302e8fa335b151b788c775ff56e7a03ae64ff85c548ee820fecb70356e85" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper", + "log", + "rustls 0.21.12", + "rustls-native-certs 0.6.3", + "tokio", + "tokio-rustls 0.24.1", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper", + "pin-project-lite", + "tokio", + "tokio-io-timeout", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" +dependencies = [ + "equivalent", + "hashbrown 0.15.0", + "serde", +] + +[[package]] +name = "insta" +version = "1.41.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e9ffc4d4892617c50a928c52b2961cb5174b6fc6ebf252b2fac9d21955c48b8" +dependencies = [ + "console", + "lazy_static", + "linked-hash-map", + "serde", + "similar", +] + +[[package]] +name = "ipnet" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + +[[package]] +name = "iso8601" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "924e5d73ea28f59011fec52a0d12185d496a9b075d360657aed2a5707f701153" +dependencies = [ + "nom", +] + +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" + +[[package]] +name = "js-sys" +version = "0.3.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "jsonschema" +version = "0.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a071f4f7efc9a9118dfb627a0a94ef247986e1ab8606a4c806ae2b3aa3b6978" +dependencies = [ + "ahash", + "anyhow", + "base64 0.21.7", + "bytecount", + "clap", + "fancy-regex", + "fraction", + "getrandom", + "iso8601", + "itoa", + "memchr", + "num-cmp", + "once_cell", + "parking_lot", + "percent-encoding", + "regex", + "reqwest", + "serde", + "serde_json", + "time", + "url", + "uuid", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin", +] + +[[package]] +name = "libc" +version = "0.2.161" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1" + +[[package]] +name = "libm" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a00419de735aac21d53b0de5ce2c03bd3627277cf471300f27ebc89f7d828047" + +[[package]] +name = "libmimalloc-sys" +version = "0.1.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "libsqlite3-sys" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linux-raw-sys" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "mimalloc" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633" +dependencies = [ + "libmimalloc-sys", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "mime_guess" +version = "2.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" +dependencies = [ + "mime", + "unicase", +] + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +dependencies = [ + "hermit-abi", + "libc", + "wasi", + "windows-sys 0.52.0", +] + +[[package]] +name = "multimap" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1a5d38b9b352dbd913288736af36af41c48d61b1a8cd34bcecd727561b7d511" +dependencies = [ + "serde", +] + +[[package]] +name = "native-tls" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "ndc-dynamodb" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "aws-config", + "aws-sdk-dynamodb", + "mimalloc", + 
"ndc-dynamodb-configuration", + "ndc-sdk", + "percent-encoding", + "prometheus", + "query-engine-execution", + "query-engine-metadata", + "query-engine-sql", + "query-engine-translation", + "serde_json", + "sqlx", + "thiserror", + "tokio", + "tracing", + "url", +] + +[[package]] +name = "ndc-dynamodb-cli" +version = "0.1.0" +dependencies = [ + "anyhow", + "build-data", + "clap", + "insta", + "ndc-dynamodb-configuration", + "serde", + "serde_json", + "serde_yaml", + "tempfile", + "thiserror", + "tokio", +] + +[[package]] +name = "ndc-dynamodb-configuration" +version = "0.1.0" +dependencies = [ + "anyhow", + "aws-config", + "aws-sdk-dynamodb", + "aws-smithy-http", + "clap", + "jsonschema", + "ndc-models", + "prometheus", + "query-engine-metadata", + "query-engine-sql", + "schemars", + "serde", + "serde_json", + "smol_str", + "sqlx", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "ndc-models" +version = "0.1.6" +source = "git+https://github.com/hasura/ndc-spec.git?tag=v0.1.6#d1be19e9cdd86ac7b6ad003ff82b7e5b4e96b84f" +dependencies = [ + "indexmap 2.6.0", + "ref-cast", + "schemars", + "serde", + "serde_json", + "serde_with", + "smol_str", +] + +[[package]] +name = "ndc-sdk" +version = "0.4.0" +source = "git+https://github.com/hasura/ndc-sdk-rs.git?tag=v0.4.0#665509f7d3b47ce4f014fc23f817a3599ba13933" +dependencies = [ + "async-trait", + "axum", + "axum-extra", + "bytes", + "clap", + "http 0.2.12", + "mime", + "ndc-models", + "ndc-test", + "opentelemetry", + "opentelemetry-http", + "opentelemetry-otlp", + "opentelemetry-semantic-conventions", + "opentelemetry-zipkin", + "opentelemetry_sdk", + "prometheus", + "reqwest", + "serde", + "serde_json", + "thiserror", + "tokio", + "tower-http", + "tracing", + "tracing-opentelemetry", + "tracing-subscriber", + "url", +] + +[[package]] +name = "ndc-test" +version = "0.1.6" +source = "git+https://github.com/hasura/ndc-spec.git?tag=v0.1.6#d1be19e9cdd86ac7b6ad003ff82b7e5b4e96b84f" +dependencies = [ + "async-trait", + "clap", + "colorful", + "indexmap 2.6.0", + "ndc-models", + "rand", + "reqwest", + "semver", + "serde", + "serde_json", + "smol_str", + "thiserror", + "tokio", + "url", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" +dependencies = [ + "num-bigint", + "num-complex", + "num-integer", + "num-iter", + "num-rational", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", +] + +[[package]] +name = "num-bigint-dig" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +dependencies = [ + "byteorder", + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + 
"rand", + "smallvec", + "zeroize", +] + +[[package]] +name = "num-cmp" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63335b2e2c34fae2fb0aa2cecfd9f0832a1e24b3b32ecec612c3426d46dc8aaa" + +[[package]] +name = "num-complex" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", + "libm", +] + +[[package]] +name = "object" +version = "0.36.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" + +[[package]] +name = "openssl" +version = "0.10.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5" +dependencies = [ + "bitflags 2.6.0", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.104" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "opentelemetry" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900d57987be3f2aeb70d385fff9b27fb74c5723cc9a52d904d4f9c807a0667bf" +dependencies = [ + "futures-core", + "futures-sink", + "js-sys", + "once_cell", + "pin-project-lite", + "thiserror", + "urlencoding", +] + +[[package]] +name = 
"opentelemetry-http" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7690dc77bf776713848c4faa6501157469017eaf332baccd4eb1cea928743d94" +dependencies = [ + "async-trait", + "bytes", + "http 0.2.12", + "opentelemetry", + "reqwest", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a016b8d9495c639af2145ac22387dcb88e44118e45320d9238fbf4e7889abcb" +dependencies = [ + "async-trait", + "futures-core", + "http 0.2.12", + "opentelemetry", + "opentelemetry-http", + "opentelemetry-proto", + "opentelemetry-semantic-conventions", + "opentelemetry_sdk", + "prost", + "reqwest", + "thiserror", + "tokio", + "tonic", +] + +[[package]] +name = "opentelemetry-proto" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a8fddc9b68f5b80dae9d6f510b88e02396f006ad48cac349411fbecc80caae4" +dependencies = [ + "opentelemetry", + "opentelemetry_sdk", + "prost", + "tonic", +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9ab5bd6c42fb9349dcf28af2ba9a0667f697f9bdcca045d39f2cec5543e2910" + +[[package]] +name = "opentelemetry-zipkin" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6943c09b1b7c17b403ae842b00f23e6d5fc6f5ec06cccb3f39aca97094a899a" +dependencies = [ + "async-trait", + "futures-core", + "http 0.2.12", + "once_cell", + "opentelemetry", + "opentelemetry-http", + "opentelemetry-semantic-conventions", + "opentelemetry_sdk", + "reqwest", + "serde", + "serde_json", + "thiserror", + "typed-builder", +] + +[[package]] +name = "opentelemetry_sdk" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e90c7113be649e31e9a0f8b5ee24ed7a16923b322c3c5ab6367469c049d6b7e" +dependencies = [ + "async-trait", + "crossbeam-channel", + "futures-channel", + "futures-executor", + "futures-util", + "glob", + "once_cell", + "opentelemetry", + "ordered-float", + "percent-encoding", + "rand", + "thiserror", + "tokio", + "tokio-stream", +] + +[[package]] +name = "ordered-float" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83e7ccb95e240b7c9506a3d544f10d935e142cc90b0a1d56954fb44d89ad6b97" +dependencies = [ + "num-traits", +] + +[[package]] +name = "outref" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4030760ffd992bef45b0ae3f10ce1aba99e33464c90d14dd7c039884963ddc7a" + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if", + 
"libc", + "redox_syscall", + "smallvec", + "windows-targets 0.52.6", +] + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "proc-macro2" +version = "1.0.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prometheus" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d33c28a30771f7f96db69893f78b857f7450d7e0237e9c8fc6427a81bae7ed1" +dependencies = [ + "cfg-if", + "fnv", + "lazy_static", + "memchr", + "parking_lot", + "protobuf", + "thiserror", +] + +[[package]] +name = "prost" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" +dependencies = [ + "bytes", + "prost-derive", +] 
+ +[[package]] +name = "prost-derive" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "protobuf" +version = "2.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94" + +[[package]] +name = "query-engine-execution" +version = "0.1.0" +dependencies = [ + "aws-config", + "aws-sdk-dynamodb", + "base64 0.22.1", + "bytes", + "ndc-models", + "prometheus", + "query-engine-sql", + "serde_json", + "sqlformat", + "sqlx", + "thiserror", + "tracing", +] + +[[package]] +name = "query-engine-metadata" +version = "0.1.0" +dependencies = [ + "ndc-models", + "schemars", + "serde", + "smol_str", +] + +[[package]] +name = "query-engine-sql" +version = "0.1.0" +dependencies = [ + "ndc-models", + "schemars", + "serde", + "serde_json", + "smol_str", +] + +[[package]] +name = "query-engine-translation" +version = "0.1.0" +dependencies = [ + "anyhow", + "indexmap 2.6.0", + "insta", + "multimap", + "ndc-dynamodb-configuration", + "ndc-models", + "ndc-sdk", + "query-engine-metadata", + "query-engine-sql", + "ref-cast", + "serde_json", + "sqlformat", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "quote" +version = "1.0.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "redox_syscall" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" +dependencies = [ + "bitflags 2.6.0", +] + +[[package]] +name = "ref-cast" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf0a6f84d5f1d581da8b41b47ec8600871962f2a528115b542b362d4b744931" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcc303e793d3734489387d205e9b186fac9c6cfacedd98cbb2e8a5943595f3e6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "regex" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata 0.4.8", + "regex-syntax 0.8.5", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.8.5", +] + +[[package]] +name = "regex-lite" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + +[[package]] +name = "reqwest" +version = "0.11.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" +dependencies = [ + "base64 0.21.7", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http 0.2.12", + "http-body 0.4.6", + "hyper", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "mime_guess", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile 1.0.4", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "system-configuration", + "tokio", + "tokio-native-tls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "winreg", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + "libc", + "spin", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rsa" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core", + "signature", + "spki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "0.38.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa260229e6538e52293eeb577aabd09945a09d6d9cc0fc550ed7529056c2e32a" +dependencies = [ + "bitflags 2.6.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring", + "rustls-webpki 0.101.7", + "sct", +] + +[[package]] +name = "rustls" +version = "0.22.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf4ef73721ac7bcd79b2b315da7779d8fc09718c6b3d2d1b2d94850eb8c18432" +dependencies = [ + "log", + "ring", + "rustls-pki-types", + "rustls-webpki 0.102.8", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls" +version = "0.23.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eee87ff5d9b36712a58574e12e9f0ea80f915a5b0ac518d322b24a465617925e" +dependencies = [ + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki 0.102.8", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile 1.0.4", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-native-certs" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" +dependencies = [ + "openssl-probe", + "rustls-pemfile 2.2.0", + "rustls-pki-types", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-pemfile" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "rustls-webpki" +version = "0.102.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" + +[[package]] +name = "ryu" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "safe-proc-macro2" +version = "1.0.67" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fd85be67db87168aa3c13fd0da99f48f2ab005dccad5af5626138dc1df20eb6" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "safe-quote" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77e530f7831f3feafcd5f1aae406ac205dd998436b4007c8e80f03eca78a88f7" +dependencies = [ + "safe-proc-macro2", +] + +[[package]] +name = "safe-regex" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5194fafa3cb9da89e0cab6dffa1f3fdded586bd6396d12be11b4cae0c7ee45c2" +dependencies = [ + 
"safe-regex-macro", +] + +[[package]] +name = "safe-regex-compiler" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e822ae1e61251bcfd698317c237cf83f7c57161a5dc24ee609a85697f1ed15b3" +dependencies = [ + "safe-proc-macro2", + "safe-quote", +] + +[[package]] +name = "safe-regex-macro" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2768de7e6ef19f59c5fd3c3ac207ef12b68a49f95e3172d67e4a04cfd992ca06" +dependencies = [ + "safe-proc-macro2", + "safe-regex-compiler", +] + +[[package]] +name = "schannel" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01227be5826fa0690321a2ba6c5cd57a19cf3f6a09e76973b58e61de6ab9d1c1" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "schemars" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09c024468a378b7e36765cd36702b7a90cc3cba11654f6685c8f233408e89e92" +dependencies = [ + "dyn-clone", + "indexmap 1.9.3", + "indexmap 2.6.0", + "schemars_derive", + "serde", + "serde_json", + "smol_str", +] + +[[package]] +name = "schemars_derive" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1eee588578aff73f856ab961cd2f79e36bc45d7ded33a7562adba4667aecc0e" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags 2.6.0", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea4a292869320c0272d7bc55a5a6aafaff59b4f63404a003887b679a2e05b4b6" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" + +[[package]] +name = "serde" +version = "1.0.214" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.214" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_derive_internals" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.132" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" +dependencies = [ + "indexmap 2.6.0", + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "serde_path_to_error" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" +dependencies = [ + "itoa", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "3.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e28bdad6db2b8340e449f7108f020b3b092e8583a9e3fb82713e1d4e71fe817" +dependencies = [ + "base64 0.22.1", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.6.0", + "serde", + "serde_derive", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "3.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d846214a9854ef724f3da161b426242d8de7c1fc7de2f89bb1efcb154dca79d" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_yaml" +version = "0.9.34+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" +dependencies = [ + "indexmap 2.6.0", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core", +] + +[[package]] +name = "similar" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = 
[ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +dependencies = [ + "serde", +] + +[[package]] +name = "smol_str" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fad6c857cbab2627dcf01ec85a623ca4e7dcb5691cbaa3d7fb7653671f0d09c9" +dependencies = [ + "serde", +] + +[[package]] +name = "socket2" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "sqlformat" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bba3a93db0cc4f7bdece8bb09e77e2e785c20bfebf79eb8340ed80708048790" +dependencies = [ + "nom", + "unicode_categories", +] + +[[package]] +name = "sqlx" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93334716a037193fac19df402f8571269c84a00852f6a7066b5d2616dcd64d3e" +dependencies = [ + "sqlx-core", + "sqlx-macros", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", +] + +[[package]] +name = "sqlx-core" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4d8060b456358185f7d50c55d9b5066ad956956fddec42ee2e8567134a8936e" +dependencies = [ + "atoi", + "byteorder", + "bytes", + "crc", + "crossbeam-queue", + "either", + "event-listener", + "futures-channel", + "futures-core", + "futures-intrusive", + "futures-io", + "futures-util", + "hashbrown 0.14.5", + "hashlink", + "hex", + "indexmap 2.6.0", + "log", + "memchr", + "once_cell", + "paste", + "percent-encoding", + "rustls 0.23.16", + "rustls-pemfile 2.2.0", + "serde", + "serde_json", + "sha2", + "smallvec", + "sqlformat", + "thiserror", + "tokio", + "tokio-stream", + "tracing", + "url", + "uuid", + "webpki-roots", +] + +[[package]] +name = "sqlx-macros" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cac0692bcc9de3b073e8d747391827297e075c7710ff6276d9f7a1f3d58c6657" +dependencies = [ + "proc-macro2", + "quote", + "sqlx-core", + "sqlx-macros-core", + "syn", +] + +[[package]] +name = "sqlx-macros-core" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1804e8a7c7865599c9c79be146dc8a9fd8cc86935fa641d3ea58e5f0688abaa5" +dependencies = [ + "dotenvy", + "either", + "heck", + "hex", + "once_cell", + "proc-macro2", + "quote", + "serde", + "serde_json", + "sha2", + "sqlx-core", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", + "syn", + "tempfile", + "tokio", + "url", +] + +[[package]] +name = "sqlx-mysql" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64bb4714269afa44aef2755150a0fc19d756fb580a67db8885608cf02f47d06a" +dependencies = [ + "atoi", + "base64 0.22.1", + "bitflags 2.6.0", + "byteorder", + "bytes", 
+ "crc", + "digest", + "dotenvy", + "either", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "generic-array", + "hex", + "hkdf", + "hmac", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "percent-encoding", + "rand", + "rsa", + "serde", + "sha1", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror", + "tracing", + "uuid", + "whoami", +] + +[[package]] +name = "sqlx-postgres" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fa91a732d854c5d7726349bb4bb879bb9478993ceb764247660aee25f67c2f8" +dependencies = [ + "atoi", + "base64 0.22.1", + "bitflags 2.6.0", + "byteorder", + "crc", + "dotenvy", + "etcetera", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "hex", + "hkdf", + "hmac", + "home", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "rand", + "serde", + "serde_json", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror", + "tracing", + "uuid", + "whoami", +] + +[[package]] +name = "sqlx-sqlite" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5b2cf34a45953bfd3daaf3db0f7a7878ab9b7a6b91b422d24a7a9e4c857b680" +dependencies = [ + "atoi", + "flume", + "futures-channel", + "futures-core", + "futures-executor", + "futures-intrusive", + "futures-util", + "libsqlite3-sys", + "log", + "percent-encoding", + "serde", + "serde_urlencoded", + "sqlx-core", + "tracing", + "url", + "uuid", +] + +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5023162dfcd14ef8f32034d8bcd4cc5ddc61ef7a247c024a33e24e1f24d21b56" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tempfile" +version = "3.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b" +dependencies = [ + "cfg-if", + "fastrand", + "once_cell", + "rustix", + "windows-sys 0.59.0", +] + +[[package]] +name = "thiserror" 
+version = "1.0.65" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d11abd9594d9b38965ef50805c5e469ca9cc6f197f883f717e0269a3057b3d5" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.65" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae71770322cbd277e69d762a16c444af02aa0575ac0d174f0b9562d3b37f8602" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "time" +version = "0.3.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tinyvec" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "145f3413504347a2be84393cc8a7d2fb4d863b375909ea59f2158261aa258bbb" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.52.0", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.12", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" +dependencies = [ 
+ "rustls 0.22.4", + "rustls-pki-types", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tonic" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76c4eb7a4e9ef9d4763600161f12f5070b92a578e1b634db88a6887844c91a13" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "base64 0.21.7", + "bytes", + "flate2", + "h2", + "http 0.2.12", + "http-body 0.4.6", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost", + "rustls-native-certs 0.7.3", + "rustls-pemfile 2.2.0", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.25.0", + "tokio-stream", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap 1.9.3", + "pin-project", + "pin-project-lite", + "rand", + "slab", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" +dependencies = [ + "bitflags 2.6.0", + "bytes", + "futures-core", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "http-range-header", + "mime", + "pin-project-lite", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + 
"tracing-core", +] + +[[package]] +name = "tracing-opentelemetry" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9be14ba1bbe4ab79e9229f7f89fab8d120b865859f10527f31c033e599d2284" +dependencies = [ + "js-sys", + "once_cell", + "opentelemetry", + "opentelemetry_sdk", + "smallvec", + "tracing", + "tracing-core", + "tracing-log", + "tracing-subscriber", + "web-time", +] + +[[package]] +name = "tracing-serde" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +dependencies = [ + "serde", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "serde", + "serde_json", + "sharded-slab", + "thread_local", + "tracing", + "tracing-core", + "tracing-serde", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typed-builder" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77739c880e00693faef3d65ea3aad725f196da38b22fdc7ea6ded6e1ce4d3add" +dependencies = [ + "typed-builder-macro", +] + +[[package]] +name = "typed-builder-macro" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f718dfaf347dcb5b983bfc87608144b0bad87970aebcbea5ce44d2a30c08e63" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "unicase" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" + +[[package]] +name = "unicode-bidi" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" + +[[package]] +name = "unicode-ident" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" + +[[package]] +name = "unicode-normalization" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" + +[[package]] +name = "unicode_categories" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" + +[[package]] +name = "unsafe-libyaml" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "uuid" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "vsimd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + +[[package]] +name = "wasm-bindgen" +version = "0.2.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" +dependencies = [ + "cfg-if", + "once_cell", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" +dependencies = [ + 
"quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" + +[[package]] +name = "web-sys" +version = "0.3.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-roots" +version = "0.26.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841c67bff177718f1d4dfefde8d8f0e78f9b6589319ba88312f567fc5841a958" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "whoami" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "372d5b87f58ec45c384ba03563b03544dc5fadc3983e434b286913f5b4a9bb6d" +dependencies = [ + "redox_syscall", + "wasite", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", 
+ "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "xmlparser" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4" + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "byteorder", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..6b30552 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,74 @@ +[workspace] +resolver= "2" + +package.version = "0.1.0" +package.edition = "2021" +package.license = "Apache-2.0" + +members = [ + "crates/cli", + "crates/ndc-dynamodb", + "crates/configuration", + "crates/query-engine/metadata", + "crates/query-engine/sql", + "crates/query-engine/translation", +] + +[workspace.lints.clippy] +all = { level = "warn", priority = -1 } +pedantic = { level = "warn", priority = -1 } +# disable certain pedantic warnings +doc_markdown = "allow" +missing_errors_doc = "allow" +missing_panics_doc = "allow" +module_name_repetitions = "allow" +must_use_candidate = "allow" +wildcard_imports = "allow" +# unstable warnings; we might need to suppress them +redundant_clone = "warn" +# disable these for now, but we should probably fix them +similar_names = "allow" +too_many_lines = "allow" + +[workspace.dependencies] +ndc-models = { git = "https://github.com/hasura/ndc-spec.git", tag = "v0.1.6" } +ndc-sdk = { git = "https://github.com/hasura/ndc-sdk-rs.git", tag = "v0.4.0" } +ndc-test = { git = "https://github.com/hasura/ndc-spec.git", tag = "v0.1.6" } + +anyhow = "1" +async-trait = "0.1" +axum = "0.6" +axum-test-helper = "0.3" +build-data = "0.2" +bytes = "1" +clap = "4" +env_logger = "0.11" +hyper = "0.14" +indexmap = "2" +insta = "1" +jsonschema = "0.17" +mimalloc = "0.1" +multimap = "0.9" +nonempty = "0.10" +percent-encoding = "2" +prometheus = "0.13" +ref-cast = "1" +reqwest = "0.11" +schemars = "0.8" +serde = "1" +serde_json = "1" +serde_yaml = "0.9" +similar-asserts = "1" +smol_str = "0.1" +sqlformat = "0.2" +sqlx = { version = "0.8", default-features = false, features = ["postgres", "derive"] } +tempfile = "3" +test-each = "0.2" +thiserror = "1" +tokio = "1" +tracing = "0.1" +url = "2" +uuid = "1" +aws-config = { version = "1.1.7", features = ["behavior-version-latest"] } +aws-sdk-dynamodb = "1.50.0" +base64 = "0.22.1" \ No newline at end of file diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index 
f992326..0000000 --- a/Dockerfile +++ /dev/null @@ -1,22 +0,0 @@ -FROM node:18-alpine - -WORKDIR /app -COPY package.json . -COPY package-lock.json . - -RUN npm ci - -COPY tsconfig.json . -COPY src src - -# This is just to ensure everything compiles ahead of time. -# We'll actually run using ts-node to ensure we get TypesScript -# stack traces if something fails at runtime. -RUN npm run typecheck - -EXPOSE 8100 -EXPOSE 9100 - -# We don't bother doing typechecking when we run (only TS->JS transpiling) -# because we checked it above already. This uses less memory at runtime. -ENTRYPOINT [ "npm", "run", "--silent", "start-no-typecheck", "--" ] diff --git a/changelog.md b/changelog.md new file mode 100644 index 0000000..5e1a9b7 --- /dev/null +++ b/changelog.md @@ -0,0 +1,24 @@ +# Changelog + +## [Unreleased] + +### Added + +### Changed + +### Fixed + +## [v0.1.0] - 2024-11-29 + +### Added + +- Initial release with support for ndc-spec v0.1.6 + +### Changed + +### Fixed + + + +[Unreleased]: https://github.com/hasura/ndc-dynamodb/compare/v0.1.0...HEAD +[v0.1.0]: https://github.com/hasura/ndc-dynamodb/releases/tag/v0.1.0 diff --git a/ci/deploy.sh b/ci/deploy.sh new file mode 100644 index 0000000..55880d2 --- /dev/null +++ b/ci/deploy.sh @@ -0,0 +1,139 @@ +#!/usr/bin/env bash +# +# To get the skopeo dependency automatically, run with: +# +# $ nix run .#publish-docker-image +# +set -euo pipefail + +DRY_RUN=false +if [[ "${1:-}" == '-n' || "${1:-}" == '--dry-run' ]]; then + DRY_RUN=true + echo "$(tput bold)$(tput setaf 1)DRY RUN; some steps will be skipped$(tput sgr0)" + shift +fi + +if [[ $# -ne 2 ]]; then + echo >&2 "Usage: ${0} [-n|--dry-run] REF IMAGE" + echo >&2 + echo >&2 ' REF should be in the form "refs/heads/<branch>" or "refs/tags/<tag>"' + echo >&2 ' (in a Github workflow the variable "github.ref" has this format)' + echo >&2 + echo >&2 ' IMAGE is the path of the Docker image, e.g. "ghcr.io/hasura/ndc-dynamodb"' + echo >&2 + echo >&2 ' "--dry-run" will not push anything, but it will still build' + exit 1 +fi + +github_ref="$1" +image="$2" + +# Runs the given command, unless `--dry-run` was set. +function run { + if "$DRY_RUN"; then + echo "$(tput bold)$(tput setaf 1)not running:$(tput sgr0) $*" + else + echo "$(tput bold)$(tput setaf 2)running:$(tput sgr0) $*" + "$@" + fi +} + +# Assumes that the given ref is a branch name. Sets a tag for a docker image of +# the form: +# +# dev-main-bffd555 +# --- ---- ------- +# ↑ ↑ ↑ +# prefix "dev" | commit hash +# branch +# +# Additionally sets a branch tag assuming this is the latest tag for the given +# branch. The branch tag has the form: dev-main +# Also sets the 'latest' tag +# Also sets a tag with just the commit short hash +function set_dev_tags { + local branch="$1" + # replace '.' and '/' in branch name with '-' + local tidy_branch + tidy_branch="$(tr './' '-' <<< "$branch")" + local branch_prefix="dev-${tidy_branch}" + local version + local short_hash + short_hash="$(git rev-parse --short=9 HEAD)" + version="${branch_prefix}-${short_hash}" + export docker_tags=("$version" "$branch_prefix" "$short_hash" "latest") +} + +# The Github workflow passes a ref of the form refs/heads/<branch name> or +# refs/tags/<tag name>. This function sets an array of docker image tags based +# on either the given branch or tag name. +# +# If a tag name does not start with a "v" it is assumed to not be a release tag, +# in which case the function falls back to just the 'latest' tag. +# +# If the input does look like a release tag, set the tag name (alongside 'latest') as the +# docker tags.
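+# +# For example (illustrative values): "refs/tags/v0.1.0" yields the tags (v0.1.0 latest), while +# "refs/heads/main" yields (dev-main-<short hash> dev-main <short hash> latest).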
+# +# If the input is a branch, set docker tags via `set_dev_tags`. +function set_docker_tags { + local input + input="$1" + if [[ $input =~ ^refs/tags/(v.*)$ ]]; then + local tag="${BASH_REMATCH[1]}" + export docker_tags=("$tag" "latest") + elif [[ $input =~ ^refs/heads/(.*)$ ]]; then + local branch="${BASH_REMATCH[1]}" + set_dev_tags "$branch" + else + export docker_tags=("latest") + fi +} + +function publish_multi_arch { + local input + local image_archive + local image_path_for_arch + + architectures=('aarch64' 'x86_64') + + input="$1" + set_docker_tags "$input" + + # do nothing if no tags found + if [[ ${#docker_tags[@]} == 0 ]]; then + echo "The given ref, ${input}, was not a release tag or a branch - will not publish a docker image" + exit + fi + + # build and push the individual images for each architecture + for arch in "${architectures[@]}"; do + # build the docker image + image_archive="docker-archive://$(nix build --print-out-paths ".#docker-${arch}-linux")" + + echo "Will publish docker image with tags: ${docker_tags[*]}" + skopeo inspect "$image_archive" + + image_path_for_arch="${image}-${arch}" + for tag in "${docker_tags[@]}"; do + echo + echo "Pushing docker://${image_path_for_arch}:${tag}" + run skopeo copy "$image_archive" "docker://${image_path_for_arch}:${tag}" + done + done + + # now create and push the manifest + for tag in "${docker_tags[@]}"; do + echo "Creating manifest for ${image}:${tag}" + # create a manifest referencing both architectures + # i did not use a loop here, forgive me + run docker manifest create \ + "$image:$tag" \ + --amend "${image}-aarch64:${tag}" \ + --amend "${image}-x86_64:${tag}" + + # push manifest as the main image + run docker manifest push "${image}:${tag}" + done +} + +publish_multi_arch "$github_ref" diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml new file mode 100644 index 0000000..9755d0f --- /dev/null +++ b/crates/cli/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "ndc-dynamodb-cli" +version.workspace = true +edition.workspace = true +license.workspace = true + +[lints] +workspace = true + +[dependencies] +ndc-dynamodb-configuration = { path = "../configuration" } + +anyhow = { workspace = true } +clap = { workspace = true, features = ["derive", "env"] } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true } +serde_yaml = { workspace = true } +thiserror = { workspace = true } +tokio = { workspace = true, features = ["full"] } + +[build-dependencies] +build-data = { workspace = true } + +[dev-dependencies] +insta = { workspace = true } +tempfile = { workspace = true } + +[package.metadata.cargo-machete] +ignored = ["build-data", "build_data"] # apparently cargo-machete doesn't find dependencies used by build scripts diff --git a/crates/cli/build.rs b/crates/cli/build.rs new file mode 100644 index 0000000..3b1de99 --- /dev/null +++ b/crates/cli/build.rs @@ -0,0 +1,20 @@ +fn main() { + // Ensure that we rebuild if the version is specified. + println!("cargo:rerun-if-env-changed=RELEASE_VERSION"); + + // On release builds, use the Git commit as a backup if the version is not set. + // On debug builds, don't bother. + // If we fail to get the Git information, give up and proceed anyway. 
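+ // For example (an illustrative invocation, not part of this patch): running + // `RELEASE_VERSION=v0.1.0 cargo build --release` pins the version, while a bare release + // build falls back to a short commit hash such as `1a2b3c4d5` or `1a2b3c4d5-dirty`.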
+ let build_profile = std::env::var("PROFILE").unwrap(); + if build_profile == "release" && option_env!("RELEASE_VERSION").is_none() { + if let Ok(git_commit_ref) = build_data::get_git_commit_short() { + let git_dirty = build_data::get_git_dirty().unwrap_or(false); + let release_version = if git_dirty { + format!("{git_commit_ref}-dirty") + } else { + git_commit_ref + }; + println!("cargo:rustc-env=RELEASE_VERSION={release_version}"); + } + } +} diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs new file mode 100644 index 0000000..690f75b --- /dev/null +++ b/crates/cli/src/lib.rs @@ -0,0 +1,191 @@ +//! The interpretation of the commands that the CLI can handle. +//! +//! The CLI can do a few things. This provides a central point where those things are routed and +//! then done, making it easier to test this crate deterministically. + +mod metadata; + +use std::path::PathBuf; + +use clap::Subcommand; + +use tokio::fs; + +use ndc_dynamodb_configuration as configuration; +use ndc_dynamodb_configuration::environment::Environment; + +const UPDATE_ATTEMPTS: u8 = 3; + +/// The various contextual bits and bobs we need to run. +pub struct Context<Env: Environment> { + pub context_path: PathBuf, + pub environment: Env, + pub release_version: Option<&'static str>, +} + +/// The command invoked by the user. +#[derive(Debug, Clone, Subcommand)] +pub enum Command { + /// Initialize a configuration in the current (empty) directory. + Initialize { + #[arg(long)] + /// Whether to create the Hasura connector metadata. + with_metadata: bool, + }, + /// Update the configuration by introspecting the database, using the configuration options. + Update, + // /// Upgrade the configuration to the latest version. This does not involve the database. + // Upgrade { + // #[arg(long)] + // dir_from: PathBuf, + // #[arg(long)] + // dir_to: PathBuf, + // }, +} + +/// The set of errors that can go wrong _in addition to_ generic I/O or parsing errors. +#[derive(Debug, PartialEq, thiserror::Error)] +pub enum Error { + #[error("directory is not empty")] + DirectoryIsNotEmpty, +} + +/// Run a command in a given directory. +pub async fn run(command: Command, context: Context<impl Environment>) -> anyhow::Result<()> { + match command { + Command::Initialize { with_metadata } => initialize(with_metadata, context).await?, + Command::Update => update(context).await?, + // Command::Upgrade { dir_from, dir_to } => upgrade(dir_from, dir_to).await?, + }; + Ok(()) +} + +/// Initialize an empty directory with an empty connector configuration. +/// +/// An empty configuration contains default settings and options, and is expected to be filled with +/// information such as the connection settings by the user, and later on metadata +/// information via introspection. +/// +/// Optionally, this can also create the connector metadata, which is used by the Hasura CLI to +/// automatically work with this CLI as a plugin.
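+/// +/// A minimal sketch of how this is reached through [run] (hedged: `FixedEnvironment` is the +/// test helper from the configuration crate, and the path is illustrative): +/// +/// ```ignore +/// let context = Context { +/// context_path: std::path::PathBuf::from("./my-connector"), +/// environment: configuration::environment::FixedEnvironment::default(), +/// release_version: None, +/// }; +/// run(Command::Initialize { with_metadata: true }, context).await?; +/// ```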
+async fn initialize(with_metadata: bool, context: Context<impl Environment>) -> anyhow::Result<()> { + // refuse to initialize the directory unless it is empty + let mut items_in_dir = fs::read_dir(&context.context_path).await?; + if items_in_dir.next_entry().await?.is_some() { + Err(Error::DirectoryIsNotEmpty)?; + } + + configuration::write_parsed_configuration( + configuration::ParsedConfiguration::initial(), + &context.context_path, + ) + .await?; + + // if requested, create the metadata + if with_metadata { + let metadata_dir = context.context_path.join(".hasura-connector"); + fs::create_dir(&metadata_dir).await?; + let metadata_file = metadata_dir.join("connector-metadata.yaml"); + let metadata = metadata::ConnectorMetadataDefinition { + packaging_definition: metadata::PackagingDefinition::PrebuiltDockerImage( + metadata::PrebuiltDockerImagePackaging { + docker_image: format!( + "ghcr.io/hasura/ndc-dynamodb:{}", + context.release_version.unwrap_or("latest") + ), + }, + ), + supported_environment_variables: vec![ + metadata::EnvironmentVariableDefinition { + name: "HASURA_DYNAMODB_AWS_ACCESS_KEY_ID".to_string(), + description: "The AWS DynamoDB access key ID".to_string(), + default_value: Some(String::new()), + required: true, + }, + metadata::EnvironmentVariableDefinition { + name: "HASURA_DYNAMODB_AWS_SECRET_ACCESS_KEY".to_string(), + description: "The AWS DynamoDB secret access key".to_string(), + default_value: Some(String::new()), + required: true, + }, + // metadata::EnvironmentVariableDefinition { + // name: "HASURA_DYNAMODB_AWS_PROVIDER_NAME".to_string(), + // description: "The AWS DynamoDB provider name".to_string(), + // default_value: Some(String::new()), + // required: true, + // }, + metadata::EnvironmentVariableDefinition { + name: "HASURA_DYNAMODB_AWS_REGION".to_string(), + description: "The AWS DynamoDB region".to_string(), + default_value: Some(String::new()), + required: true, + }, + ], + commands: metadata::Commands { + update: Some("hasura-ndc-dynamodb update".to_string()), + watch: None, + }, + cli_plugin: Some(metadata::CliPluginDefinition { + name: "ndc-dynamodb".to_string(), + version: context.release_version.unwrap_or("latest").to_string(), + }), + docker_compose_watch: vec![metadata::DockerComposeWatchItem { + path: "./".to_string(), + target: Some("/etc/connector".to_string()), + action: metadata::DockerComposeWatchAction::SyncAndRestart, + ignore: vec![], + }], + }; + + fs::write(metadata_file, serde_yaml::to_string(&metadata)?).await?; + } + + Ok(()) +} + +/// Update the configuration in the current directory by introspecting the database. +/// +/// This expects a configuration with valid connection settings. +async fn update(context: Context<impl Environment>) -> anyhow::Result<()> { + // It is possible to change the file in the middle of introspection. + // We want to detect this scenario and retry, or fail if we are unable to. + // We do that with a few attempts. + for _attempt in 1..=UPDATE_ATTEMPTS { + let existing_configuration = + configuration::parse_configuration(&context.context_path).await?; + let output = + configuration::introspect(&existing_configuration, &context.environment) + .await?; + + // Check that the input file did not change since we started introspecting, + let input_again_before_write = + configuration::parse_configuration(&context.context_path).await?; + + // and skip this attempt if it has.
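+ // (The `==` check below leans on `ParsedConfiguration` implementing `PartialEq`: if the + // re-parse equals what we started from, nothing raced us and writing is safe.)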
+ if input_again_before_write == existing_configuration { + // In order to be sure to capture default values absent in the initial input we have to + // always write out the updated configuration. + configuration::write_parsed_configuration(output, &context.context_path).await?; + return Ok(()); + } + + // If we have reached here, the input file changed before writing. + } + + // We ran out of attempts. + Err(anyhow::anyhow!( + "Cannot overwrite configuration: input changed before write." + )) +} + +// /// Upgrade the configuration in a directory by trying to read it and then write it back +// /// out to a different directory. +// /// +// async fn upgrade(dir_from: PathBuf, dir_to: PathBuf) -> anyhow::Result<()> { +// let old_configuration = configuration::parse_configuration(dir_from).await?; +// let upgraded_configuration = configuration::upgrade_to_latest_version(old_configuration); +// configuration::write_parsed_configuration(upgraded_configuration, dir_to).await?; + +// eprintln!("Upgrade completed successfully. You may need to also run 'update'."); + +// Ok(()) +// } diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs new file mode 100644 index 0000000..3454a3d --- /dev/null +++ b/crates/cli/src/main.rs @@ -0,0 +1,62 @@ +//! The CLI application. This is used to configure a deployment of ndc-dynamodb. +//! +//! This is intended to be automatically downloaded and invoked via the Hasura CLI, as a plugin. +//! It is unlikely that end-users will use it directly. + +use std::env; +use std::path::PathBuf; +use std::process::ExitCode; + +use clap::Parser; + +use ndc_dynamodb_cli::*; +use ndc_dynamodb_configuration as configuration; + +/// The release version specified at build time. +/// +/// We should use the latest version if this is not specified. +const RELEASE_VERSION: Option<&str> = option_env!("RELEASE_VERSION"); + +/// The command-line arguments. +#[derive(Debug, Parser)] +#[command( + version = RELEASE_VERSION.unwrap_or("unknown"), + about = "Configuration tool for ndc-dynamodb" +)] +pub struct Args { + /// The path to the configuration. Defaults to the current directory. + #[arg(long = "context", env = "HASURA_PLUGIN_CONNECTOR_CONTEXT_PATH")] + pub context_path: Option<PathBuf>, + /// The command to invoke. + #[command(subcommand)] + pub subcommand: Command, +} + +#[tokio::main] +pub async fn main() -> ExitCode { + if let Err(err) = try_main().await { + // The default formatting for anyhow in our case includes a 'Caused by' section + // that duplicates what's already in the error message, so we don't display it. + eprintln!("ERROR: {err}"); + return ExitCode::FAILURE; + } + ExitCode::SUCCESS +} + +/// The application entrypoint. It pulls information from the environment and then calls the [run] +/// function. The library remains unaware of the environment, so that we can more easily test it. +async fn try_main() -> anyhow::Result<()> { + let args = Args::parse(); + // Default the context path to the current directory. + let context_path = match args.context_path { + Some(path) => path, + None => env::current_dir()?, + }; + let context = Context { + context_path, + environment: configuration::environment::ProcessEnvironment, + release_version: RELEASE_VERSION, + }; + run(args.subcommand, context).await?; + Ok(()) +} diff --git a/crates/cli/src/metadata.rs b/crates/cli/src/metadata.rs new file mode 100644 index 0000000..1257871 --- /dev/null +++ b/crates/cli/src/metadata.rs @@ -0,0 +1,78 @@ +//! Structures that represent the connector metadata definition. +//! +//!
See https://github.com/hasura/ndc-hub/blob/main/rfcs/0001-packaging.md#connector-definition. + +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ConnectorMetadataDefinition { + pub packaging_definition: PackagingDefinition, + pub supported_environment_variables: Vec<EnvironmentVariableDefinition>, + pub commands: Commands, + #[serde(skip_serializing_if = "Option::is_none")] + pub cli_plugin: Option<CliPluginDefinition>, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub docker_compose_watch: DockerComposeWatch, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "PascalCase", tag = "type")] +pub enum PackagingDefinition { + PrebuiltDockerImage(PrebuiltDockerImagePackaging), + ManagedDockerBuild, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PrebuiltDockerImagePackaging { + pub docker_image: String, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct EnvironmentVariableDefinition { + pub name: String, + pub description: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub default_value: Option<String>, + pub required: bool, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Commands { + #[serde(skip_serializing_if = "Option::is_none")] + pub update: Option<String>, + #[serde(skip_serializing_if = "Option::is_none")] + pub watch: Option<String>, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CliPluginDefinition { + pub name: String, + pub version: String, +} + +pub type DockerComposeWatch = Vec<DockerComposeWatchItem>; + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct DockerComposeWatchItem { + pub path: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub target: Option<String>, + pub action: DockerComposeWatchAction, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub ignore: Vec<String>, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum DockerComposeWatchAction { + Rebuild, + Sync, + #[serde(rename = "sync+restart")] + SyncAndRestart, +} diff --git a/crates/configuration/Cargo.toml b/crates/configuration/Cargo.toml new file mode 100644 index 0000000..fb9f224 --- /dev/null +++ b/crates/configuration/Cargo.toml @@ -0,0 +1,32 @@ +[package] +name = "ndc-dynamodb-configuration" +version.workspace = true +edition.workspace = true +license.workspace = true + +[lints] +workspace = true + +[dependencies] +ndc-models = { workspace = true } +query-engine-metadata = { path = "../query-engine/metadata" } +query-engine-sql = { path = "../query-engine/sql" } + +anyhow = { workspace = true } +# We only use clap for the derive.
+clap = { workspace = true, features = ["derive", "env"] } +prometheus = { workspace = true } +schemars = { workspace = true, features = ["smol_str", "preserve_order"] } +serde = { workspace = true } +serde_json = { workspace = true, features = ["raw_value"] } +smol_str = { workspace = true } +sqlx = { workspace = true, features = ["json", "postgres", "runtime-tokio-rustls"] } +thiserror = { workspace = true } +tokio = { workspace = true, features = ["full"] } +tracing = { workspace = true } +aws-config = { workspace = true } +aws-sdk-dynamodb = { workspace = true } +aws-smithy-http = "0.60.11" + +[dev-dependencies] +jsonschema = { workspace = true } diff --git a/crates/configuration/src/configuration.rs b/crates/configuration/src/configuration.rs new file mode 100644 index 0000000..3548be3 --- /dev/null +++ b/crates/configuration/src/configuration.rs @@ -0,0 +1,27 @@ +use query_engine_metadata::metadata; + +pub const CONFIGURATION_FILENAME: &str = "configuration.json"; +pub const CONFIGURATION_JSONSCHEMA_FILENAME: &str = "schema.json"; + +pub const DEFAULT_CONNECTION_URI_VARIABLE: &str = "CONNECTION_URI"; + +/// The 'Configuration' type collects all the information necessary to serve queries at runtime. +/// +/// 'ParsedConfiguration' deals with a multitude of different concrete version formats, and each +/// version is responsible for interpreting its serialized format into the current 'Configuration'. +/// Values of this type are produced from a 'ParsedConfiguration' using +/// 'make_runtime_configuration'. +/// +/// Separating 'ParsedConfiguration' and 'Configuration' simplifies the main query translation +/// logic by placing the responsibility of dealing with configuration format evolution in +/// 'ParsedConfiguration'. +/// +#[derive(Debug)] +pub struct Configuration { + pub metadata: metadata::Metadata, + pub access_key_id: String, + pub secret_access_key: String, + // pub provider_name: String, + pub region: String, + // pub mutations_version: Option, +} diff --git a/crates/configuration/src/connection_settings.rs b/crates/configuration/src/connection_settings.rs new file mode 100644 index 0000000..fd5743e --- /dev/null +++ b/crates/configuration/src/connection_settings.rs @@ -0,0 +1,39 @@ +//! Database connection settings. + +use crate::values::{AccessKeyId, Region, Secret, SecretAccessKey}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +pub const DEFAULT_ACCESS_KEY_ID_VARIABLE: &str = "HASURA_DYNAMODB_AWS_ACCESS_KEY_ID"; +pub const DEFAULT_SECRET_ACCESS_KEY_VARIABLE: &str = "HASURA_DYNAMODB_AWS_SECRET_ACCESS_KEY"; +pub const DEFAULT_PROVIDER_NAME: &str = "HASURA_DYNAMODB_AWS_PROVIDER_NAME"; +pub const DEFAULT_REGION_VARIABLE: &str = "HASURA_DYNAMODB_AWS_REGION"; + +/// Database connection settings.
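+/// +/// A sketch of the JSON this deserializes from (field names follow the `camelCase` rename +/// below; values are illustrative, and any `Secret` may instead be a plain string literal): +/// +/// ```json +/// { +/// "accessKeyId": { "variable": "HASURA_DYNAMODB_AWS_ACCESS_KEY_ID" }, +/// "secretAccessKey": { "variable": "HASURA_DYNAMODB_AWS_SECRET_ACCESS_KEY" }, +/// "region": { "variable": "HASURA_DYNAMODB_AWS_REGION" } +/// } +/// ```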
+#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize, JsonSchema)] +#[serde(rename_all = "camelCase")] +pub struct DatabaseConnectionSettings { + pub access_key_id: AccessKeyId, + pub secret_access_key: SecretAccessKey, + // pub provider_name: ProviderName, + pub region: Region, +} + +impl DatabaseConnectionSettings { + pub fn empty() -> Self { + Self { + access_key_id: AccessKeyId(Secret::FromEnvironment { + variable: DEFAULT_ACCESS_KEY_ID_VARIABLE.into(), + }), + secret_access_key: SecretAccessKey(Secret::FromEnvironment { + variable: DEFAULT_SECRET_ACCESS_KEY_VARIABLE.into(), + }), + // provider_name: ProviderName(Secret::FromEnvironment { + // variable: DEFAULT_PROVIDER_NAME.into(), + // }), + region: Region(Secret::FromEnvironment { + variable: DEFAULT_REGION_VARIABLE.into(), + }), + } + } +} diff --git a/crates/configuration/src/environment.rs b/crates/configuration/src/environment.rs new file mode 100644 index 0000000..5434b7c --- /dev/null +++ b/crates/configuration/src/environment.rs @@ -0,0 +1,189 @@ +//! Infrastructure for parsing configuration which refers to an environment. + +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +/// A configuration environment, used to supply variables when loading configuration. +pub trait Environment { + fn read(&self, variable: &Variable) -> Result<Value, Error>; +} + +/// The name of an environment variable. +#[derive(Clone, PartialEq, Eq, Hash, Deserialize, Serialize, JsonSchema)] +pub struct Variable(String); + +impl std::fmt::Debug for Variable { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } +} + +impl std::fmt::Display for Variable { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +impl From<String> for Variable { + fn from(value: String) -> Self { + Self(value) + } +} + +impl From<&str> for Variable { + fn from(value: &str) -> Self { + Self(value.to_string()) + } +} + +impl Variable { + /// Construct a new variable. + pub fn new(value: String) -> Self { + Self(value) + } +} + +/// A value stored in the environment. +pub type Value = String; + +/// Errors that can occur on reading from an environment. +#[derive(Debug, PartialEq, thiserror::Error)] +pub enum Error { + #[error("variable was not present: {0:?}")] + VariableNotPresent(Variable), + #[error("value is not Unicode: {0:?}")] + NonUnicodeValue(std::ffi::OsString), +} + +/// If a value is an environment, so is its reference. +impl<T: Environment> Environment for &T { + fn read(&self, variable: &Variable) -> Result<Value, Error> { + <T as Environment>::read(*self, variable) + } +} + +/// HashMaps can be treated as environments for testing. +pub type FixedEnvironment = std::collections::HashMap<Variable, Value>; + +impl Environment for FixedEnvironment { + fn read(&self, variable: &Variable) -> Result<Value, Error> { + self.get(variable) + .cloned() + .ok_or_else(|| Error::VariableNotPresent(variable.clone())) + } +} + +/// An empty environment, used when you don't need one. +pub struct EmptyEnvironment; + +impl Environment for EmptyEnvironment { + fn read(&self, variable: &Variable) -> Result<Value, Error> { + Err(Error::VariableNotPresent(variable.clone())) + } +} + +/// An environment populated from the process' environment variables.
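+/// +/// E.g. (a sketch; this reads the real process environment): +/// +/// ```ignore +/// let variable = Variable::from("HASURA_DYNAMODB_AWS_REGION"); +/// let region = ProcessEnvironment.read(&variable)?; +/// ```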
+pub struct ProcessEnvironment; + +impl Environment for ProcessEnvironment { + fn read(&self, variable: &Variable) -> Result<Value, Error> { + std::env::var(&variable.0).map_err(|error| match error { + std::env::VarError::NotPresent => Error::VariableNotPresent(variable.clone()), + std::env::VarError::NotUnicode(value) => Error::NonUnicodeValue(value), + }) + } +} + +pub struct JoinEnvironments<A, B> { + a: A, + b: B, +} + +impl<A, B> JoinEnvironments<A, B> { + pub fn new(a: A, b: B) -> Self { + Self { a, b } + } +} + +impl<A: Environment, B: Environment> Environment for JoinEnvironments<A, B> { + fn read(&self, variable: &Variable) -> Result<Value, Error> { + self.a.read(variable).or_else(|_| self.b.read(variable)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_fixed_environment_reads_a_present_value() { + let environment = FixedEnvironment::from([("THING".into(), "one".into())]); + let variable = "THING".into(); + + let value = environment.read(&variable); + + assert_eq!(value, Ok("one".to_string())); + } + + #[test] + fn test_fixed_environment_does_not_manufacture_values() { + let environment = FixedEnvironment::from([("WHAT".into(), "yes".into())]); + let variable = "ANOTHER_THING".into(); + + let value = environment.read(&variable); + + assert_eq!(value, Err(Error::VariableNotPresent(variable))); + } + + #[test] + fn test_joining_an_environment_looks_in_the_first_one() { + let environment = JoinEnvironments::new( + FixedEnvironment::from([("HELLO".into(), "hallo".into())]), + FixedEnvironment::from([("GOODBYE".into(), "tschuess".into())]), + ); + let variable = "HELLO".into(); + + let value = environment.read(&variable); + + assert_eq!(value, Ok("hallo".to_string())); + } + + #[test] + fn test_joining_an_environment_looks_in_the_second_one() { + let environment = JoinEnvironments::new( + FixedEnvironment::from([("HELLO".into(), "bon journo".into())]), + FixedEnvironment::from([("GOODBYE".into(), "ciao".into())]), + ); + let variable = "GOODBYE".into(); + + let value = environment.read(&variable); + + assert_eq!(value, Ok("ciao".to_string())); + } + + #[test] + fn test_joining_an_environment_favors_the_first_one() { + let environment = JoinEnvironments::new( + FixedEnvironment::from([("HELLO".into(), "bonjour".into())]), + FixedEnvironment::from([("HELLO".into(), "grueezi".into())]), + ); + let variable = "HELLO".into(); + + let value = environment.read(&variable); + + assert_eq!(value, Ok("bonjour".to_string())); + } + + #[test] + fn test_joining_an_environment_eventually_gives_up() { + let environment = JoinEnvironments::new( + FixedEnvironment::from([("HELLO".into(), "bonjour".into())]), + FixedEnvironment::from([("HELLO".into(), "grueezi".into())]), + ); + let variable = "GOODBYE".into(); + + let value = environment.read(&variable); + + assert_eq!(value, Err(Error::VariableNotPresent(variable))); + } +} diff --git a/crates/configuration/src/error.rs b/crates/configuration/src/error.rs new file mode 100644 index 0000000..d9d5c7d --- /dev/null +++ b/crates/configuration/src/error.rs @@ -0,0 +1,76 @@ +//! Errors that can be thrown when processing configuration. + +use std::fmt::Display; + +/// The errors that can be thrown when processing configuration. +/// +/// This is effectively a copy of the `ParseError` enum in the `ndc-sdk` crate. However, we don't +/// want a dependency on that crate here, as this crate is used by the CLI. Duplicating here and +/// converting the values later means we can avoid that dependency.
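+/// +/// (Illustrative) `MultiError` below renders one ` * label: error` line per attempted parse, +/// so `UnableToParseAnyVersions` ends up listing every configuration version that was tried.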
+#[derive(Debug, thiserror::Error)] +pub enum ParseConfigurationError { + #[error("parse error on {file_path}:{line}:{column}: {message}")] + ParseError { + file_path: std::path::PathBuf, + line: usize, + column: usize, + message: String, + }, + + // TODO(PY): replace with the correct input name + #[error("empty connection detail")] + EmptyConnection { file_path: std::path::PathBuf }, + + #[error("I/O error: {0}")] + IoErrorButStringified(String), + + #[error("I/O error: {0}")] + IoError(#[from] std::io::Error), + + #[error("Did not find expected version tag: \"{0}\"")] + DidNotFindExpectedVersionTag(String), + + #[error("Unable to parse any configuration versions: {0}")] + UnableToParseAnyVersions(MultiError), +} + +#[derive(Debug)] +pub struct MultiError(pub Vec<(String, Box<ParseConfigurationError>)>); + +impl Display for MultiError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + for (label, err) in &self.0 { + "\n".fmt(f)?; + " * ".fmt(f)?; + label.fmt(f)?; + ": ".fmt(f)?; + err.fmt(f)?; + "\n".fmt(f)?; + } + Ok(()) + } +} + +#[derive(Debug, thiserror::Error)] +pub enum WriteParsedConfigurationError { + #[error("Version not supported: {0}")] + VersionNotSupported(String), + + #[error("I/O error: {0}")] + IoError(#[from] std::io::Error), + + #[error("Trying to write file \"{file}\" outside destination dir \"{dir}\"")] + WritingOutsideDestinationDir { + dir: std::path::PathBuf, + file: std::path::PathBuf, + }, +} + +#[derive(Debug, thiserror::Error)] +pub enum MakeRuntimeConfigurationError { + #[error("missing environment variable when processing {file_path}: {message}")] + MissingEnvironmentVariable { + file_path: std::path::PathBuf, + message: String, + }, +} diff --git a/crates/configuration/src/lib.rs b/crates/configuration/src/lib.rs new file mode 100644 index 0000000..b4e61cc --- /dev/null +++ b/crates/configuration/src/lib.rs @@ -0,0 +1,22 @@ +pub mod configuration; +pub mod connection_settings; +pub mod environment; +pub mod error; +mod to_runtime_configuration; +mod values; +pub mod version1; + +pub use configuration::Configuration; +pub use to_runtime_configuration::make_runtime_configuration; +pub use values::connection_info::{AccessKeyId, ProviderName, Region, SecretAccessKey}; +pub use version1::{ + introspect, + parse_configuration, + write_parsed_configuration, + // single_connection_uri, // for tests only + // validate_raw_configuration, + // Configuration, + // ConfigurationError, + // PoolSettings, + ParsedConfiguration, +}; diff --git a/crates/configuration/src/to_runtime_configuration.rs b/crates/configuration/src/to_runtime_configuration.rs new file mode 100644 index 0000000..ebe9efa --- /dev/null +++ b/crates/configuration/src/to_runtime_configuration.rs @@ -0,0 +1,288 @@ +//! Convert the parsed configuration metadata to internal engine metadata +//! that can be used by the connector at runtime. + +use super::version1::ParsedConfiguration; +use crate::environment::Environment; +use crate::error::MakeRuntimeConfigurationError; +use crate::values::{AccessKeyId, Region, Secret, SecretAccessKey}; +use query_engine_metadata::{self, metadata}; +// use crate::VersionTag; + +/// Convert the parsed configuration metadata to internal engine metadata +/// that can be used by the connector at runtime.
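+/// +/// A sketch of the intended call site (assuming the process environment supplies the secrets): +/// +/// ```ignore +/// let runtime_config = make_runtime_configuration(parsed_config, ProcessEnvironment)?; +/// ```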
+pub fn make_runtime_configuration( + parsed_config: ParsedConfiguration, + environment: impl Environment, +) -> Result<crate::Configuration, MakeRuntimeConfigurationError> { + let access_key_id = match parsed_config.connection_settings.access_key_id { + AccessKeyId(Secret::Plain(key)) => Ok(key), + AccessKeyId(Secret::FromEnvironment { variable }) => { + environment.read(&variable).map_err(|error| { + MakeRuntimeConfigurationError::MissingEnvironmentVariable { + file_path: super::version1::CONFIGURATION_FILENAME.into(), + message: error.to_string(), + } + }) + } + }?; + let secret_access_key = match parsed_config.connection_settings.secret_access_key { + SecretAccessKey(Secret::Plain(key)) => Ok(key), + SecretAccessKey(Secret::FromEnvironment { variable }) => { + environment.read(&variable).map_err(|error| { + MakeRuntimeConfigurationError::MissingEnvironmentVariable { + file_path: super::version1::CONFIGURATION_FILENAME.into(), + message: error.to_string(), + } + }) + } + }?; + let region = match parsed_config.connection_settings.region { + Region(Secret::Plain(key)) => Ok(key), + Region(Secret::FromEnvironment { variable }) => { + environment.read(&variable).map_err(|error| { + MakeRuntimeConfigurationError::MissingEnvironmentVariable { + file_path: super::version1::CONFIGURATION_FILENAME.into(), + message: error.to_string(), + } + }) + } + }?; + Ok(crate::Configuration { + metadata: convert_metadata(parsed_config.metadata), + access_key_id, + secret_access_key, + // provider_name, + region, + // pool_settings: parsed_config.pool_settings, + // mutations_version: convert_mutations_version(parsed_config.mutations_version), + }) +} + +/// Convert the metadata specified in the parsed configuration to an engine metadata. +/// This function is used by tests as well. +pub fn convert_metadata(metadata: metadata::Metadata) -> query_engine_metadata::metadata::Metadata { + query_engine_metadata::metadata::Metadata { + tables: convert_tables(metadata.tables), + scalar_types: convert_scalar_types(metadata.scalar_types), + // composite_types: convert_composite_types(metadata.composite_types), + // native_operations: convert_native_operations(metadata.native_operations), + } +} + +fn convert_scalar_types( + scalar_types: metadata::ScalarTypes, +) -> query_engine_metadata::metadata::ScalarTypes { + query_engine_metadata::metadata::ScalarTypes( + scalar_types + .0 + .into_iter() + .map(|(scalar_type_name, scalar_type)| { + ( + scalar_type_name, + query_engine_metadata::metadata::ScalarType { + type_name: scalar_type.type_name, + // schema_name: (scalar_type.schema_name), + description: scalar_type.description, + comparison_operators: scalar_type + .comparison_operators + .into_iter() + .map(|(k, v)| (k, convert_comparison_operator(v))) + .collect(), + type_representation: scalar_type + .type_representation + .map(convert_type_representation), + }, + ) + }) + .collect(), + ) +} + +fn convert_nullable(nullable: &metadata::Nullable) -> query_engine_metadata::metadata::Nullable { + match nullable { + metadata::Nullable::Nullable => query_engine_metadata::metadata::Nullable::Nullable, + metadata::Nullable::NonNullable => query_engine_metadata::metadata::Nullable::NonNullable, + } +} + +fn convert_type(r#type: metadata::Type) -> query_engine_metadata::metadata::Type { + match r#type { + metadata::Type::ScalarType(t) => query_engine_metadata::metadata::Type::ScalarType(t), + metadata::Type::ArrayType(t) => { + query_engine_metadata::metadata::Type::ArrayType(Box::new(convert_type(*t))) + } + } +} + +fn convert_type_representation( + type_representation:
metadata::TypeRepresentation, +) -> query_engine_metadata::metadata::TypeRepresentation { + match type_representation { + metadata::TypeRepresentation::Boolean => { + query_engine_metadata::metadata::TypeRepresentation::Boolean + } + metadata::TypeRepresentation::String => { + query_engine_metadata::metadata::TypeRepresentation::String + } + metadata::TypeRepresentation::Float32 => { + query_engine_metadata::metadata::TypeRepresentation::Float32 + } + metadata::TypeRepresentation::Float64 => { + query_engine_metadata::metadata::TypeRepresentation::Float64 + } + metadata::TypeRepresentation::Int16 => { + query_engine_metadata::metadata::TypeRepresentation::Int16 + } + metadata::TypeRepresentation::Int32 => { + query_engine_metadata::metadata::TypeRepresentation::Int32 + } + metadata::TypeRepresentation::Int64 => { + query_engine_metadata::metadata::TypeRepresentation::Int64 + } + metadata::TypeRepresentation::Int64AsString => { + query_engine_metadata::metadata::TypeRepresentation::Int64AsString + } + metadata::TypeRepresentation::BigDecimal => { + query_engine_metadata::metadata::TypeRepresentation::BigDecimal + } + metadata::TypeRepresentation::BigDecimalAsString => { + query_engine_metadata::metadata::TypeRepresentation::BigDecimalAsString + } + metadata::TypeRepresentation::Timestamp => { + query_engine_metadata::metadata::TypeRepresentation::Timestamp + } + metadata::TypeRepresentation::Timestamptz => { + query_engine_metadata::metadata::TypeRepresentation::Timestamptz + } + metadata::TypeRepresentation::Time => { + query_engine_metadata::metadata::TypeRepresentation::Time + } + metadata::TypeRepresentation::Timetz => { + query_engine_metadata::metadata::TypeRepresentation::Timetz + } + metadata::TypeRepresentation::Date => { + query_engine_metadata::metadata::TypeRepresentation::Date + } + metadata::TypeRepresentation::UUID => { + query_engine_metadata::metadata::TypeRepresentation::UUID + } + metadata::TypeRepresentation::Geography => { + query_engine_metadata::metadata::TypeRepresentation::Geography + } + metadata::TypeRepresentation::Geometry => { + query_engine_metadata::metadata::TypeRepresentation::Geometry + } + // This is deprecated in ndc-spec + // TODO(PY): do we want to include number and integer? 
+ // metadata::TypeRepresentation::Number + // | metadata::TypeRepresentation::Integer + metadata::TypeRepresentation::Json => { + query_engine_metadata::metadata::TypeRepresentation::Json + } + metadata::TypeRepresentation::Enum(v) => { + query_engine_metadata::metadata::TypeRepresentation::Enum(v) + } + } +} + +fn convert_comparison_operator( + comparison_operator: metadata::ComparisonOperator, +) -> query_engine_metadata::metadata::ComparisonOperator { + query_engine_metadata::metadata::ComparisonOperator { + operator_name: comparison_operator.operator_name, + operator_kind: convert_operator_kind(&comparison_operator.operator_kind), + argument_type: comparison_operator.argument_type, + is_infix: comparison_operator.is_infix, + } +} + +fn convert_operator_kind( + operator_kind: &metadata::OperatorKind, +) -> query_engine_metadata::metadata::OperatorKind { + match operator_kind { + metadata::OperatorKind::Equal => query_engine_metadata::metadata::OperatorKind::Equal, + metadata::OperatorKind::In => query_engine_metadata::metadata::OperatorKind::In, + metadata::OperatorKind::Custom => query_engine_metadata::metadata::OperatorKind::Custom, + } +} + +// fn convert_composite_types( +// composite_types: metadata::CompositeTypes, +// ) -> query_engine_metadata::metadata::CompositeTypes { +// query_engine_metadata::metadata::CompositeTypes( +// composite_types +// .0 +// .into_iter() +// .map(|(k, composite_type)| (k, convert_composite_type(composite_type))) +// .collect(), +// ) +// } + +// fn convert_composite_type( +// composite_type: metadata::CompositeType, +// ) -> query_engine_metadata::metadata::CompositeType { +// query_engine_metadata::metadata::CompositeType { +// type_name: composite_type.type_name, +// schema_name: (composite_type.schema_name), +// fields: composite_type +// .fields +// .into_iter() +// .map(|(k, field)| (k, convert_composite_type_field_info(field))) +// .collect(), +// description: composite_type.description, +// } +// } + +// fn convert_composite_type_field_info( +// field: metadata::FieldInfo, +// ) -> query_engine_metadata::metadata::FieldInfo { +// query_engine_metadata::metadata::FieldInfo { +// field_name: field.field_name, +// r#type: convert_type(field.r#type), +// description: field.description, +// } +// } + +pub fn convert_tables(tables: metadata::TablesInfo) -> query_engine_metadata::metadata::TablesInfo { + query_engine_metadata::metadata::TablesInfo( + tables + .0 + .into_iter() + .map(|(k, table_info)| (k, convert_table_info(table_info))) + .collect(), + ) +} + +fn convert_table_info( + table_info: metadata::TableInfo, +) -> query_engine_metadata::metadata::TableInfo { + query_engine_metadata::metadata::TableInfo { + // schema_name: table_info.schema_name, + table_name: table_info.table_name, + columns: table_info + .columns + .into_iter() + .map(|(k, column_info)| (k, convert_column_info(column_info))) + .collect(), + // uniqueness_constraints: (table_info.uniqueness_constraints), + // foreign_relations: convert_foreign_relations(table_info.foreign_relations), + description: table_info.description, + partition_key: table_info.partition_key, + sort_key: table_info.sort_key, + gsi: table_info.gsi, + } +} + +fn convert_column_info( + column_info: metadata::ColumnInfo, +) -> query_engine_metadata::metadata::ColumnInfo { + query_engine_metadata::metadata::ColumnInfo { + name: column_info.name, + r#type: convert_type(column_info.r#type), + nullable: convert_nullable(&column_info.nullable), + // has_default: convert_has_default(&column_info.has_default), + 
+        // is_identity: convert_is_identity(&column_info.is_identity),
+        // is_generated: convert_is_generated(&column_info.is_generated),
+        description: column_info.description,
+    }
+}
diff --git a/crates/configuration/src/values/connection_info.rs b/crates/configuration/src/values/connection_info.rs
new file mode 100644
index 0000000..ba71cca
--- /dev/null
+++ b/crates/configuration/src/values/connection_info.rs
@@ -0,0 +1,64 @@
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+
+use super::Secret;
+
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, JsonSchema)]
+pub struct AccessKeyId(pub Secret);
+
+impl From<String> for AccessKeyId {
+    fn from(value: String) -> Self {
+        Self(value.into())
+    }
+}
+
+impl From<&str> for AccessKeyId {
+    fn from(value: &str) -> Self {
+        Self::from(value.to_string())
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, JsonSchema)]
+pub struct SecretAccessKey(pub Secret);
+
+impl From<String> for SecretAccessKey {
+    fn from(value: String) -> Self {
+        Self(value.into())
+    }
+}
+
+impl From<&str> for SecretAccessKey {
+    fn from(value: &str) -> Self {
+        Self::from(value.to_string())
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, JsonSchema)]
+pub struct ProviderName(pub Secret);
+
+impl From<String> for ProviderName {
+    fn from(value: String) -> Self {
+        Self(value.into())
+    }
+}
+
+impl From<&str> for ProviderName {
+    fn from(value: &str) -> Self {
+        Self::from(value.to_string())
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, JsonSchema)]
+pub struct Region(pub Secret);
+
+impl From<String> for Region {
+    fn from(value: String) -> Self {
+        Self(value.into())
+    }
+}
+
+impl From<&str> for Region {
+    fn from(value: &str) -> Self {
+        Self::from(value.to_string())
+    }
+}
diff --git a/crates/configuration/src/values/mod.rs b/crates/configuration/src/values/mod.rs
new file mode 100644
index 0000000..9793a0a
--- /dev/null
+++ b/crates/configuration/src/values/mod.rs
@@ -0,0 +1,6 @@
+pub mod connection_info;
+mod pool_settings;
+mod secret;
+
+pub use connection_info::{AccessKeyId, Region, SecretAccessKey};
+pub use secret::Secret;
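For reference, each of these newtypes wraps a `Secret` (defined in values/secret.rs below), so in configuration JSON a credential can be written either inline or as an environment-variable reference. A minimal sketch, assuming the crate-root re-exports used elsewhere in this patch:

    use ndc_dynamodb_configuration::values::Secret;
    use ndc_dynamodb_configuration::AccessKeyId;

    fn demo() -> Result<(), serde_json::Error> {
        // A literal value deserializes into Secret::Plain...
        let plain: AccessKeyId = serde_json::from_str(r#""AKIA...""#)?;
        assert!(matches!(plain, AccessKeyId(Secret::Plain(_))));

        // ...while `{ "variable": ... }` becomes Secret::FromEnvironment,
        // resolved against the environment at startup.
        let from_env: AccessKeyId =
            serde_json::from_str(r#"{ "variable": "AWS_ACCESS_KEY_ID" }"#)?;
        assert!(matches!(from_env, AccessKeyId(Secret::FromEnvironment { .. })));
        Ok(())
    }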
diff --git a/crates/configuration/src/values/pool_settings.rs b/crates/configuration/src/values/pool_settings.rs
new file mode 100644
index 0000000..cb3850b
--- /dev/null
+++ b/crates/configuration/src/values/pool_settings.rs
@@ -0,0 +1,56 @@
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+
+/// Settings for the DynamoDB connection pool
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct PoolSettings {
+    /// maximum number of pool connections
+    #[serde(default = "max_connection_default")]
+    pub max_connections: u32,
+    /// timeout for acquiring a connection from the pool (seconds)
+    #[serde(default = "pool_timeout_default")]
+    pub pool_timeout: u64,
+    /// idle timeout for releasing a connection from the pool (seconds)
+    #[serde(default = "idle_timeout_default")]
+    pub idle_timeout: Option<u64>,
+    /// check the connection is alive after being idle for N seconds. Set to null to always check.
+    #[serde(default = "check_connection_after_idle_default")]
+    pub check_connection_after_idle: Option<u64>,
+    /// maximum lifetime for an individual connection (seconds)
+    #[serde(default = "connection_lifetime_default")]
+    pub connection_lifetime: Option<u64>,
+}
+
+impl Default for PoolSettings {
+    fn default() -> PoolSettings {
+        PoolSettings {
+            max_connections: 50,
+            pool_timeout: 30,
+            idle_timeout: Some(180),
+            connection_lifetime: Some(600),
+            check_connection_after_idle: Some(60),
+        }
+    }
+}
+
+fn max_connection_default() -> u32 {
+    PoolSettings::default().max_connections
+}
+
+fn pool_timeout_default() -> u64 {
+    PoolSettings::default().pool_timeout
+}
+
+fn idle_timeout_default() -> Option<u64> {
+    PoolSettings::default().idle_timeout
+}
+
+fn connection_lifetime_default() -> Option<u64> {
+    PoolSettings::default().connection_lifetime
+}
+
+fn check_connection_after_idle_default() -> Option<u64> {
+    PoolSettings::default().check_connection_after_idle
+}
diff --git a/crates/configuration/src/values/secret.rs b/crates/configuration/src/values/secret.rs
new file mode 100644
index 0000000..abc8ccc
--- /dev/null
+++ b/crates/configuration/src/values/secret.rs
@@ -0,0 +1,26 @@
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+
+use crate::environment;
+
+// Configuration type for values that can come from secrets. That format includes both literal
+// values as well as symbolic references to secrets.
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, JsonSchema)]
+#[serde(untagged, rename_all = "camelCase")]
+pub enum Secret {
+    Plain(String),
+    FromEnvironment { variable: environment::Variable },
+}
+
+// This conversion is useful for testing.
+impl From<String> for Secret {
+    fn from(value: String) -> Self {
+        Self::Plain(value)
+    }
+}
+
+impl From<&str> for Secret {
+    fn from(value: &str) -> Self {
+        Self::Plain(value.to_string())
+    }
+}
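Because every field carries a `#[serde(default = "...")]` fallback, an empty object is a complete pool configuration. A sketch of the behaviour (note the module is private in this patch, so this assumes `PoolSettings` gets re-exported):

    use ndc_dynamodb_configuration::values::pool_settings::PoolSettings;

    fn demo() {
        // `{}` falls back to `PoolSettings::default()` for every field.
        let settings: PoolSettings = serde_json::from_str("{}").unwrap();
        assert_eq!(settings, PoolSettings::default());
        assert_eq!(settings.max_connections, 50);
        assert_eq!(settings.idle_timeout, Some(180));

        // Individual fields can be overridden; note the camelCase rename.
        let settings: PoolSettings =
            serde_json::from_str(r#"{ "maxConnections": 10 }"#).unwrap();
        assert_eq!(settings.max_connections, 10);
        assert_eq!(settings.pool_timeout, 30);
    }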
diff --git a/crates/configuration/src/version1.rs b/crates/configuration/src/version1.rs
new file mode 100644
index 0000000..24feb00
--- /dev/null
+++ b/crates/configuration/src/version1.rs
@@ -0,0 +1,472 @@
+//! Internal Configuration and state for our connector.
+
+use crate::environment::Environment;
+use crate::error::WriteParsedConfigurationError;
+use crate::values::Secret;
+use crate::{connection_settings, AccessKeyId, SecretAccessKey};
+
+use super::error::ParseConfigurationError;
+use aws_sdk_dynamodb::types::KeyType;
+use aws_sdk_dynamodb::Config;
+use ndc_models::{CollectionName, ComparisonOperatorName, FieldName, ScalarTypeName};
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+use std::borrow::Cow;
+use std::collections::{BTreeMap, BTreeSet};
+use std::path::Path;
+use tokio::fs;
+
+use query_engine_metadata::metadata::{
+    self, database, ColumnInfo, Nullable, ProjectionTypeInfo, ScalarTypes, TablesInfo,
+};
+
+const CURRENT_VERSION: u32 = 1;
+pub const CONFIGURATION_FILENAME: &str = "configuration.json";
+const CHARACTER_STRINGS: [&str; 3] = ["character", "text", "string"];
+const UNICODE_CHARACTER_STRINGS: [&str; 3] = ["nchar", "ntext", "nvarchar"];
+const CANNOT_COMPARE: [&str; 3] = ["text", "ntext", "image"];
+
+/// Initial configuration, just enough to connect to a database and elaborate a full
+/// 'Configuration'.
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct ParsedConfiguration {
+    // Which version of the configuration format are we using
+    pub version: u32,
+    pub connection_settings: connection_settings::DatabaseConnectionSettings,
+    #[serde(default)]
+    pub metadata: metadata::Metadata,
+    // #[serde(default)]
+    // pub introspection_options: options::IntrospectionOptions,
+}
+
+impl ParsedConfiguration {
+    pub fn initial() -> Self {
+        ParsedConfiguration::empty()
+    }
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize, JsonSchema)]
+pub enum Version {
+    #[serde(rename = "1")]
+    This,
+}
+
+impl ParsedConfiguration {
+    pub fn empty() -> Self {
+        Self {
+            version: CURRENT_VERSION,
+            connection_settings: connection_settings::DatabaseConnectionSettings::empty(),
+            metadata: metadata::Metadata::default(),
+            // aggregate_functions: metadata::AggregateFunctions::default(),
+        }
+    }
+}
+
+pub async fn introspect(
+    args: &ParsedConfiguration,
+    environment: impl Environment,
+) -> anyhow::Result<ParsedConfiguration> {
+    let access_key_id = match &args.connection_settings.access_key_id {
+        AccessKeyId(Secret::Plain(value)) => Cow::Borrowed(value),
+        AccessKeyId(Secret::FromEnvironment { variable }) => {
+            Cow::Owned(environment.read(variable)?)
+        }
+    };
+    let secret_access_key = match &args.connection_settings.secret_access_key {
+        SecretAccessKey(Secret::Plain(value)) => Cow::Borrowed(value),
+        SecretAccessKey(Secret::FromEnvironment { variable }) => {
+            Cow::Owned(environment.read(variable)?)
+        }
+    };
+    // let provider_name = match &args.connection_settings.provider_name {
+    //     ProviderName(Secret::Plain(value)) => Cow::Borrowed(value),
+    //     ProviderName(Secret::FromEnvironment { variable }) => Cow::Owned(environment.read(variable)?),
+    // };
+    let region = match &args.connection_settings.region {
+        crate::Region(Secret::Plain(value)) => Cow::Borrowed(value),
+        crate::Region(Secret::FromEnvironment { variable }) => {
+            Cow::Owned(environment.read(variable)?)
+        }
+    };
+    // let access_key_id = args.connection_settings.access_key_id.clone();
+    // let secret_access_key = args.connection_settings.secret_access_key.clone();
+    // let session_token = args.connection_settings.session_token.clone();
+    // let region = args.connection_settings.region.clone();
+    // let config = aws_config::load_from_env().await;
+    let credentials = aws_sdk_dynamodb::config::Credentials::new(
+        access_key_id.to_string(),
+        secret_access_key.to_string(),
+        None,          // Optional session token
+        None,          // Expiration (None for non-expiring)
+        "my-provider", // Provider name
+    );
+
+    // Configure AWS SDK with explicit credentials
+    let config = Config::builder()
+        .region(aws_config::Region::new(region.to_string()))
+        .credentials_provider(credentials)
+        .build();
+
+    // To use localhost url
+    // let config = aws_config::defaults(aws_config::BehaviorVersion::latest())
+    //     .test_credentials()
+    //     .region(aws_config::Region::new("us-west-2"))
+    //     // DynamoDB run locally uses port 8000 by default.
+    //     .endpoint_url("http://localhost:8085")
+    //     .load()
+    //     .await;
+    // let dynamodb_local_config = aws_sdk_dynamodb::config::Builder::from(&config).build();
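A runnable variant of the commented-out local setup above, assuming DynamoDB Local is listening on http://localhost:8085 (the port used in this patch; DynamoDB Local's default is 8000):

    async fn local_client() -> aws_sdk_dynamodb::Client {
        let config = aws_config::defaults(aws_config::BehaviorVersion::latest())
            .test_credentials()
            .region(aws_config::Region::new("us-west-2"))
            .endpoint_url("http://localhost:8085")
            .load()
            .await;
        // Equivalent to building a service config from the shared config,
        // as the commented-out lines sketch.
        aws_sdk_dynamodb::Client::new(&config)
    }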
+
+    let client = aws_sdk_dynamodb::Client::from_conf(config);
+    let tables_result = client.list_tables().send().await;
+    let tables = tables_result
+        .map_err(|_op| {
+            ParseConfigurationError::IoErrorButStringified("Failed to list tables:".to_string())
+        })
+        .unwrap(); //TODO: handle error
+    let table_names = tables.table_names.unwrap_or_default();
+    let mut scalars_list: BTreeSet<ScalarTypeName> = BTreeSet::new();
+    let mut tables_info: BTreeMap<CollectionName, metadata::TableInfo> = BTreeMap::new();
+    for table_name in table_names {
+        let table_result = client.describe_table().table_name(table_name).send().await;
+        let table = table_result.unwrap(); //TODO: handle error
+        let table = table.table.unwrap();
+        let table_name = table.table_name.unwrap();
+        let attribute_definitions = table.attribute_definitions.unwrap();
+        let mut columns_info: BTreeMap<FieldName, ColumnInfo> = BTreeMap::new();
+        for columns in attribute_definitions {
+            let scalar_name = columns.attribute_name;
+            let scalar_field_name = FieldName::new(scalar_name.clone().into());
+            let scalar_type_attribute = columns.attribute_type;
+            let scalar_type_name = match scalar_type_attribute.as_str() {
+                "S" => ScalarTypeName::new("String".into()),
+                "N" => ScalarTypeName::new("Number".into()),
+                "B" => ScalarTypeName::new("Binary".into()),
+                _ => ScalarTypeName::new("Any".into()),
+            };
+            scalars_list.insert(scalar_type_name.clone());
+            let scalar_type = metadata::Type::ScalarType(scalar_type_name);
+            let column_info = ColumnInfo {
+                name: scalar_name,
+                r#type: scalar_type,
+                nullable: Nullable::Nullable,
+                description: None,
+            };
+            columns_info.insert(scalar_field_name, column_info);
+        }
+
+        // get non-key attributes
+        let result = client
+            .execute_statement()
+            .statement(format!(r#"select * from {table_name}"#))
+            .set_parameters(None)
+            .set_limit(Some(20))
+            .send()
+            .await
+            .unwrap();
+
+        for item in &result.items.unwrap() {
+            for (key, attribute_value) in item {
+                let column_name = FieldName::new(key.clone().into());
+                let column_type = if attribute_value.is_s() {
+                    let scalar_type_name = ScalarTypeName::new("String".into());
+                    scalars_list.insert(scalar_type_name.clone());
+                    metadata::Type::ScalarType(scalar_type_name)
+                } else if attribute_value.is_n() {
+                    let scalar_type_name = ScalarTypeName::new("Number".into());
+                    scalars_list.insert(scalar_type_name.clone());
+                    metadata::Type::ScalarType(scalar_type_name)
+                } else if attribute_value.is_bool() {
+                    let scalar_type_name = ScalarTypeName::new("Boolean".into());
+                    scalars_list.insert(scalar_type_name.clone());
+                    metadata::Type::ScalarType(scalar_type_name)
+                } else if attribute_value.is_b() {
+                    let scalar_type_name = ScalarTypeName::new("Binary".into());
+                    scalars_list.insert(scalar_type_name.clone());
+                    metadata::Type::ScalarType(scalar_type_name)
+                } else if attribute_value.is_l() {
+                    let scalar_type_name = ScalarTypeName::new("List".into());
+                    metadata::Type::ScalarType(scalar_type_name)
+                } else if attribute_value.is_m() {
+                    let scalar_type_name = ScalarTypeName::new("Map".into());
+                    metadata::Type::ScalarType(scalar_type_name)
+                } else {
+                    metadata::Type::ScalarType(ScalarTypeName::new("Any".into()))
+                };
+                let column_info = ColumnInfo {
+                    name: key.clone(),
+                    r#type: column_type,
+                    nullable: Nullable::Nullable,
+                    description: None,
+                };
+                columns_info.insert(column_name, column_info);
+            }
+        }
+
+        //
+        let mut key_info: BTreeMap<KeyType, String> = BTreeMap::new();
+        let key_schema = table.key_schema.unwrap();
+        for key in key_schema {
+            let name = key.attribute_name;
+            let key_type = key.key_type;
+
+            if key_type == KeyType::Hash || key_type == KeyType::Range {
+                key_info.insert(key_type, name);
+            }
+        }
+        let partition_key = key_info.get(&KeyType::Hash).unwrap();
+        let sort_key = key_info.get(&KeyType::Range).unwrap();
+
+        let mut gsi_indexes: BTreeMap<String, metadata::GlobalSecondaryIndexInfo> = BTreeMap::new();
+        let gsis = table.global_secondary_indexes.unwrap();
+        for gsi in gsis {
+            let index_name = gsi.index_name.unwrap();
+            let mut index_keys_info: BTreeMap<KeyType, String> = BTreeMap::new();
+            let index_keys = gsi.key_schema.unwrap();
+            for key in index_keys {
+                let name = key.attribute_name;
+                let key_type = key.key_type;
+
+                if key_type == KeyType::Hash || key_type == KeyType::Range {
+                    index_keys_info.insert(key_type, name);
+                }
+            }
+            let partition_key = index_keys_info.get(&KeyType::Hash).unwrap();
+            let sort_key: Option<String> = index_keys_info.get(&KeyType::Range).cloned();
+
+            let projection_type = gsi
+                .projection
+                .clone()
+                .unwrap()
+                .projection_type
+                .unwrap()
+                .as_str()
+                .to_string();
+            let non_key_attributes = gsi
+                .projection
+                .unwrap()
+                .non_key_attributes
+                .unwrap_or_default();
+            gsi_indexes.insert(
+                index_name,
+                metadata::GlobalSecondaryIndexInfo {
+                    partition_key: partition_key.to_owned(),
+                    sort_key,
+                    projection_type: ProjectionTypeInfo {
+                        projection_type,
+                        non_key_attributes,
+                    },
+                },
+            );
+        }
+        let table_info = metadata::TableInfo {
+            table_name: table_name.clone(),
+            columns: columns_info,
+            partition_key: partition_key.to_owned(),
+            sort_key: sort_key.to_owned(),
+            gsi: metadata::GlobalSecondaryIndexes(gsi_indexes),
+            description: None,
+        };
+        tables_info.insert(CollectionName::new(table_name.into()), table_info);
+    }
+    // Scalars
+    let mut scalars: BTreeMap<ScalarTypeName, metadata::ScalarType> = BTreeMap::new();
+    for scalar in scalars_list {
+        let type_rep = match scalar.as_str() {
+            "String" => Some(metadata::TypeRepresentation::String),
+            "Number" => Some(metadata::TypeRepresentation::Int64),
+            "Boolean" => Some(metadata::TypeRepresentation::Boolean),
+            _ => None,
+        };
+        let scalar_type = metadata::ScalarType {
+            type_name: scalar.clone(),
+            description: None,
+            comparison_operators: get_comparison_operators_for_type(&scalar),
+            type_representation: type_rep,
+        };
+        scalars.insert(scalar.clone(), scalar_type);
+    }
+    Ok(ParsedConfiguration {
+        version: 1,
+        connection_settings: connection_settings::DatabaseConnectionSettings {
+            access_key_id: args.connection_settings.access_key_id.clone(),
+            secret_access_key: args.connection_settings.secret_access_key.clone(),
+            // provider_name: args.connection_settings.provider_name.clone(),
+            region: args.connection_settings.region.clone(),
+        },
+        metadata: metadata::Metadata {
+            tables: TablesInfo(tables_info),
+            scalar_types: ScalarTypes(scalars),
+        },
+    })
+}
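The resulting on-disk format is small. A sketch of a minimal configuration.json accepted by the `ParsedConfiguration` serde derives above (the exact field names inside connectionSettings are defined in connection_settings.rs and assumed here):

    fn example() -> ParsedConfiguration {
        let raw = r#"{
            "version": 1,
            "connectionSettings": {
                "accessKeyId": { "variable": "AWS_ACCESS_KEY_ID" },
                "secretAccessKey": { "variable": "AWS_SECRET_ACCESS_KEY" },
                "region": { "variable": "AWS_REGION" }
            }
        }"#;
        // `metadata` may be omitted entirely thanks to #[serde(default)];
        // a subsequent `introspect` run fills it in.
        serde_json::from_str(raw).unwrap()
    }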
+
+/// Parse the configuration format from a directory.
+pub async fn parse_configuration(
+    configuration_dir: impl AsRef<Path> + Send,
+) -> Result<ParsedConfiguration, ParseConfigurationError> {
+    let configuration_file = configuration_dir.as_ref().join(CONFIGURATION_FILENAME);
+
+    let configuration_file_contents =
+        fs::read_to_string(&configuration_file)
+            .await
+            .map_err(|err| {
+                ParseConfigurationError::IoErrorButStringified(format!(
+                    "{}: {}",
+                    &configuration_file.display(),
+                    err
+                ))
+            })?;
+
+    let parsed_config: ParsedConfiguration = serde_json::from_str(&configuration_file_contents)
+        .map_err(|error| ParseConfigurationError::ParseError {
+            file_path: configuration_file.clone(),
+            line: error.line(),
+            column: error.column(),
+            message: error.to_string(),
+        })?;
+
+    Ok(parsed_config)
+}
+
+/// Write the parsed configuration into a directory on disk.
+pub async fn write_parsed_configuration(
+    parsed_config: ParsedConfiguration,
+    out_dir: impl AsRef<Path>,
+) -> Result<(), WriteParsedConfigurationError> {
+    let configuration_file = out_dir.as_ref().to_owned().join(CONFIGURATION_FILENAME);
+    fs::create_dir_all(out_dir.as_ref()).await?;
+
+    // create the configuration file
+    fs::write(
+        configuration_file,
+        serde_json::to_string_pretty(&parsed_config)
+            .map_err(|e| WriteParsedConfigurationError::IoError(e.into()))?
+            + "\n",
+    )
+    .await?;
+
+    // // create the jsonschema file
+    // let configuration_jsonschema_file_path = out_dir
+    //     .as_ref()
+    //     .to_owned()
+    //     .join(CONFIGURATION_JSONSCHEMA_FILENAME);
+
+    // let output = schemars::schema_for!(ParsedConfiguration);
+    // fs::write(
+    //     &configuration_jsonschema_file_path,
+    //     serde_json::to_string_pretty(&output)
+    //         .map_err(|e| WriteParsedConfigurationError::IoError(e.into()))?
+    //         + "\n",
+    // )
+    // .await?;
+
+    Ok(())
+}
+
+// we hard code these, essentially
+// we look up available types in `sys.types` but hard code their behaviour by looking them up below
+// categories taken from https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql
+fn get_comparison_operators_for_type(
+    type_name: &ndc_models::ScalarTypeName,
+) -> BTreeMap<ComparisonOperatorName, database::ComparisonOperator> {
+    let mut comparison_operators = BTreeMap::new();
+
+    // in ndc-spec, all things can be `==`
+    comparison_operators.insert(
+        ComparisonOperatorName::new("_eq".into()),
+        database::ComparisonOperator {
+            operator_name: "=".to_string(),
+            argument_type: type_name.clone(),
+            operator_kind: database::OperatorKind::Equal,
+            is_infix: true,
+        },
+    );
+
+    comparison_operators.insert(
+        ComparisonOperatorName::new("_in".into()),
+        database::ComparisonOperator {
+            operator_name: "IN".to_string(),
+            argument_type: type_name.clone(),
+            operator_kind: database::OperatorKind::In,
+            is_infix: true,
+        },
+    );
+
+    // include LIKE and NOT LIKE for string-ish types
+    if CHARACTER_STRINGS.contains(&type_name.as_str())
+        || UNICODE_CHARACTER_STRINGS.contains(&type_name.as_str())
+    {
+        comparison_operators.insert(
+            ComparisonOperatorName::new("_like".into()),
+            database::ComparisonOperator {
+                operator_name: "LIKE".to_string(),
+                argument_type: type_name.clone(),
+                operator_kind: database::OperatorKind::Custom,
+                is_infix: true,
+            },
+        );
+        comparison_operators.insert(
+            ComparisonOperatorName::new("_nlike".into()),
+            database::ComparisonOperator {
+                operator_name: "NOT LIKE".to_string(),
+                argument_type: type_name.clone(),
+                operator_kind: database::OperatorKind::Custom,
+                is_infix: true,
+            },
+        );
+    }
+
+    // include comparison operators for types that are comparable, according to
+    // https://learn.microsoft.com/en-us/sql/t-sql/language-elements/comparison-operators-transact-sql?view=sql-server-ver16
+    if !CANNOT_COMPARE.contains(&type_name.as_str()) {
+        comparison_operators.insert(
+            ComparisonOperatorName::new("_neq".into()),
+            database::ComparisonOperator {
+                operator_name: "!=".to_string(),
+                argument_type: type_name.clone(),
+                operator_kind: database::OperatorKind::Custom,
+                is_infix: true,
+            },
+        );
+        comparison_operators.insert(
+            ComparisonOperatorName::new("_lt".into()),
+            database::ComparisonOperator {
+                operator_name: "<".to_string(),
+                argument_type: type_name.clone(),
+                operator_kind: database::OperatorKind::Custom,
+                is_infix: true,
+            },
+        );
+        comparison_operators.insert(
+            ComparisonOperatorName::new("_gt".into()),
+            database::ComparisonOperator {
+                operator_name: ">".to_string(),
+                argument_type: type_name.clone(),
+                operator_kind: database::OperatorKind::Custom,
+                is_infix: true,
+            },
+        );
+
+        comparison_operators.insert(
+            ComparisonOperatorName::new("_gte".into()),
+            database::ComparisonOperator {
+                operator_name: ">=".to_string(),
+                argument_type: type_name.clone(),
+                operator_kind: database::OperatorKind::Custom,
+                is_infix: true,
+            },
+        );
+        comparison_operators.insert(
+            ComparisonOperatorName::new("_lte".into()),
+            database::ComparisonOperator {
+                operator_name: "<=".to_string(),
+                argument_type: type_name.clone(),
+                operator_kind: database::OperatorKind::Custom,
+                is_infix: true,
+            },
+        );
+    }
+    comparison_operators
+}
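Worth noting: the three lists above hold lowercase T-SQL type names, while the scalars produced by introspection are capitalised ("String", "Number", ...), so only the unconditional and ordered branches apply to them. An illustrative check:

    fn demo() {
        // "string" matches CHARACTER_STRINGS and is absent from CANNOT_COMPARE:
        // _eq, _in, _like, _nlike, _neq, _lt, _gt, _gte, _lte.
        let ops =
            get_comparison_operators_for_type(&ndc_models::ScalarTypeName::new("string".into()));
        assert_eq!(ops.len(), 9);

        // The introspected "String" scalar matches neither list: _eq, _in,
        // plus the ordered comparisons, but no LIKE operators.
        let ops =
            get_comparison_operators_for_type(&ndc_models::ScalarTypeName::new("String".into()));
        assert_eq!(ops.len(), 7);
    }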
diff --git a/crates/ndc-dynamodb/Cargo.toml b/crates/ndc-dynamodb/Cargo.toml
new file mode 100644
index 0000000..6d23e91
--- /dev/null
+++ b/crates/ndc-dynamodb/Cargo.toml
@@ -0,0 +1,39 @@
+[package]
+name = "ndc-dynamodb"
+version.workspace = true
+edition.workspace = true
+
+default-run = "ndc-dynamodb"
+
+[lints]
+workspace = true
+
+[lib]
+name = "ndc_dynamodb"
+path = "src/lib.rs"
+
+[[bin]]
+name = "ndc-dynamodb"
+path = "bin/main.rs"
+
+[dependencies]
+ndc-sdk = { workspace = true }
+ndc-dynamodb-configuration = { path = "../configuration" }
+query-engine-sql = { path = "../query-engine/sql" }
+query-engine-translation = { path = "../query-engine/translation" }
+query-engine-execution = { path = "../query-engine/execution" }
+query-engine-metadata = { path = "../query-engine/metadata" }
+
+anyhow = { workspace = true }
+async-trait = { workspace = true }
+mimalloc = { workspace = true }
+percent-encoding = { workspace = true }
+prometheus = { workspace = true }
+serde_json = { workspace = true, features = ["raw_value"] }
+sqlx = { workspace = true, features = ["json", "postgres", "runtime-tokio-rustls"] }
+thiserror = { workspace = true }
+tokio = { workspace = true, features = ["full"] }
+tracing = { workspace = true }
+url = { workspace = true }
+aws-config = { workspace = true, features = ["behavior-version-latest"] }
+aws-sdk-dynamodb = { workspace = true }
\ No newline at end of file
diff --git a/crates/ndc-dynamodb/bin/main.rs b/crates/ndc-dynamodb/bin/main.rs
new file mode 100644
index 0000000..d74e274
--- /dev/null
+++ b/crates/ndc-dynamodb/bin/main.rs
@@ -0,0 +1,17 @@
+use std::process::ExitCode;
+
+use ndc_dynamodb::connector::DynamoDBSetup;
+use ndc_dynamodb_configuration::environment::ProcessEnvironment;
+use ndc_sdk::default_main::default_main_with;
+
+#[tokio::main]
+pub async fn main() -> ExitCode {
+    let result = default_main_with(DynamoDBSetup::new(ProcessEnvironment)).await;
+    match result {
+        Ok(()) => ExitCode::SUCCESS,
+        Err(error) => {
+            eprintln!("{error}");
+            ExitCode::FAILURE
+        }
+    }
+}
diff --git a/crates/ndc-dynamodb/src/capabilities.rs b/crates/ndc-dynamodb/src/capabilities.rs
new file mode 100644
index 0000000..c2cfb4c
--- /dev/null
+++ b/crates/ndc-dynamodb/src/capabilities.rs
@@ -0,0 +1,30 @@
+//! `/capabilities` endpoint for the connector.
+
+use ndc_sdk::models;
+
+/// Get the connector's capabilities.
+///
+/// This function implements the [capabilities endpoint](https://hasura.github.io/ndc-spec/specification/capabilities.html)
+/// from the NDC specification.
+pub fn get_capabilities() -> models::Capabilities {
+    models::Capabilities {
+        query: models::QueryCapabilities {
+            aggregates: Some(models::LeafCapability {}),
+            variables: None,
+            explain: None,
+            exists: models::ExistsCapabilities {
+                nested_collections: None,
+            },
+            nested_fields: models::NestedFieldCapabilities {
+                filter_by: None,
+                order_by: None,
+                aggregates: None,
+            },
+        },
+        mutation: models::MutationCapabilities {
+            transactional: None,
+            explain: None,
+        },
+        relationships: None,
+    }
+}
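A quick way to inspect the advertised surface, assuming the crate is used as a library:

    fn main() {
        let caps = ndc_dynamodb::capabilities::get_capabilities();
        // Aggregates are the only query capability enabled; variables, explain,
        // nested-field filtering/ordering, mutations and relationships are all off.
        println!("{}", serde_json::to_string_pretty(&caps).unwrap());
    }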
diff --git a/crates/ndc-dynamodb/src/connector.rs b/crates/ndc-dynamodb/src/connector.rs
new file mode 100644
index 0000000..27f0906
--- /dev/null
+++ b/crates/ndc-dynamodb/src/connector.rs
@@ -0,0 +1,277 @@
+//! This defines a `Connector` implementation for DynamoDB.
+//!
+//! The routes are defined here.
+
+use std::path::Path;
+use std::sync::Arc;
+
+use async_trait::async_trait;
+use tracing::{info_span, Instrument};
+
+use ndc_sdk::connector;
+use ndc_sdk::connector::{Connector, ConnectorSetup, Result};
+use ndc_sdk::json_response::JsonResponse;
+use ndc_sdk::models;
+
+use ndc_dynamodb_configuration as configuration;
+// use super::configuration;
+use ndc_dynamodb_configuration::environment::Environment;
+
+use super::capabilities;
+// use super::mutation;
+use super::query;
+use super::schema;
+use super::state;
+
+pub struct DynamoDB;
+
+#[async_trait]
+impl Connector for DynamoDB {
+    /// The parsed configuration
+    type Configuration = Arc<configuration::Configuration>;
+    /// The unserializable, transient state
+    type State = Arc<state::State>;
+
+    /// Update any metrics from the state
+    ///
+    /// Note: some metrics can be updated directly, and do not
+    /// need to be updated here. This function can be useful to
+    /// query metrics which cannot be updated directly, e.g.
+    /// the number of idle connections in a connection pool
+    /// can be polled but not updated directly.
+    fn fetch_metrics(
+        _configuration: &Arc<configuration::Configuration>,
+        _state: &Self::State,
+    ) -> Result<()> {
+        Ok(())
+    }
+
+    /// Get the connector's capabilities.
+    ///
+    /// This function implements the [capabilities endpoint](https://hasura.github.io/ndc-spec/specification/capabilities.html)
+    /// from the NDC specification.
+    async fn get_capabilities() -> models::Capabilities {
+        capabilities::get_capabilities()
+    }
+
+    /// Get the connector's schema.
+    ///
+    /// This function implements the [schema endpoint](https://hasura.github.io/ndc-spec/specification/schema/index.html)
+    /// from the NDC specification.
+    async fn get_schema(
+        configuration: &Self::Configuration,
+    ) -> Result<JsonResponse<models::SchemaResponse>> {
+        schema::get_schema(configuration)
+            // .await
+            .map_err(|err| {
+                // TODO(PY): await?
+                tracing::error!(
+                    meta.signal_type = "log",
+                    event.domain = "ndc",
+                    event.name = "Schema error",
+                    name = "Schema error",
+                    body = %err,
+                    error = true,
+                );
+                err
+            })
+            .map(Into::into)
+    }
+
+    /// Explain a query by creating an execution plan
+    ///
+    /// This function implements the [query/explain endpoint](https://hasura.github.io/ndc-spec/specification/explain.html)
+    /// from the NDC specification.
+    async fn query_explain(
+        _configuration: &Self::Configuration,
+        _state: &Self::State,
+        _request: models::QueryRequest,
+    ) -> Result<JsonResponse<models::ExplainResponse>> {
+        todo!("query explain is currently not implemented")
+    }
+
+    /// Explain a mutation by creating an execution plan
+    ///
+    /// This function implements the [mutation/explain endpoint](https://hasura.github.io/ndc-spec/specification/explain.html)
+    /// from the NDC specification.
+    async fn mutation_explain(
+        _configuration: &Self::Configuration,
+        _state: &Self::State,
+        _request: models::MutationRequest,
+    ) -> Result<JsonResponse<models::ExplainResponse>> {
+        todo!("mutation explain is currently not implemented")
+    }
+
+    /// Execute a mutation
+    ///
+    /// This function implements the [mutation endpoint](https://hasura.github.io/ndc-spec/specification/mutations/index.html)
+    /// from the NDC specification.
+    async fn mutation(
+        _configuration: &Self::Configuration,
+        _state: &Self::State,
+        _request: models::MutationRequest,
+    ) -> Result<JsonResponse<models::MutationResponse>> {
+        todo!("mutation is currently not implemented")
+    }
+
+    /// Execute a query
+    ///
+    /// This function implements the [query endpoint](https://hasura.github.io/ndc-spec/specification/queries/index.html)
+    /// from the NDC specification.
+    async fn query(
+        configuration: &Self::Configuration,
+        state: &Self::State,
+        query_request: models::QueryRequest,
+    ) -> Result<JsonResponse<models::QueryResponse>> {
+        query::query(configuration, state, query_request)
+            .await
+            .map_err(|err| {
+                tracing::error!(
+                    meta.signal_type = "log",
+                    event.domain = "ndc",
+                    event.name = "Query error",
+                    name = "Query error",
+                    body = %err,
+                    error = true,
+                );
+                err
+            })
+    }
+}
+
+pub struct DynamoDBSetup<Env: Environment> {
+    environment: Env,
+}
+
+impl<Env: Environment> DynamoDBSetup<Env> {
+    pub fn new(environment: Env) -> Self {
+        Self { environment }
+    }
+}
+
+#[async_trait]
+impl<Env: Environment + Send + Sync> ConnectorSetup for DynamoDBSetup<Env> {
+    type Connector = DynamoDB;
+
+    /// Validate the raw configuration provided by the user,
+    /// returning a configuration error or a validated `Connector::Configuration`.
+    async fn parse_configuration(
+        &self,
+        configuration_dir: impl AsRef<Path> + Send,
+    ) -> Result<<Self::Connector as Connector>::Configuration> {
+        // Note that we don't log validation errors, because they are part of the normal business
+        // operation of configuration validation, i.e. they don't represent an error condition that
+        // signifies that anything has gone wrong with the ndc process or infrastructure.
+        let parsed_configuration = configuration::parse_configuration(configuration_dir)
+            .instrument(info_span!("parse configuration"))
+            .await
+            .map_err(|error| match error {
+                configuration::error::ParseConfigurationError::ParseError {
+                    file_path,
+                    line,
+                    column,
+                    message,
+                } => connector::ParseError::ParseError(connector::LocatedError {
+                    file_path,
+                    line,
+                    column,
+                    message,
+                })
+                .into(),
+                configuration::error::ParseConfigurationError::EmptyConnection { file_path } => {
+                    connector::ParseError::ValidateError(connector::InvalidNodes(vec![
+                        connector::InvalidNode {
+                            file_path,
+                            node_path: vec![connector::KeyOrIndex::Key("connection".into())],
+                            message: "Connection must be specified".to_string(),
+                        },
+                    ]))
+                    .into()
+                }
+                // configuration::error::ParseConfigurationError::EmptyProjectId { file_path } => {
+                //     connector::ParseError::ValidateError(connector::InvalidNodes(vec![
+                //         connector::InvalidNode {
+                //             file_path,
+                //             node_path: vec![connector::KeyOrIndex::Key("projectId".into())],
+                //             message: "BigQuery project ID must be specified".to_string(),
+                //         },
+                //     ]))
+                //     .into()
+                // }
+                // configuration::error::ParseConfigurationError::EmptyDatasetId { file_path } => {
+                //     connector::ParseError::ValidateError(connector::InvalidNodes(vec![
+                //         connector::InvalidNode {
+                //             file_path,
+                //             node_path: vec![connector::KeyOrIndex::Key("datasetId".into())],
+                //             message: "BigQuery dataset ID must be specified".to_string(),
+                //         },
+                //     ]))
+                //     .into()
+                // }
+                configuration::error::ParseConfigurationError::IoError(inner) => {
+                    connector::ParseError::IoError(inner).into()
+                }
+                configuration::error::ParseConfigurationError::IoErrorButStringified(inner) => {
+                    inner.into()
+                }
+                configuration::error::ParseConfigurationError::DidNotFindExpectedVersionTag(_)
+                | configuration::error::ParseConfigurationError::UnableToParseAnyVersions(_) => {
+                    connector::ErrorResponse::from_error(error)
+                }
+            })?;
+
+        let runtime_configuration =
+            configuration::make_runtime_configuration(parsed_configuration, &self.environment)
+                .map_err(|error| {
+                    match error {
+                        configuration::error::MakeRuntimeConfigurationError::MissingEnvironmentVariable {
+                            file_path,
+                            message,
+                        } => connector::ParseError::ValidateError(connector::InvalidNodes(vec![
+                            connector::InvalidNode {
+                                file_path,
+                                node_path: vec![connector::KeyOrIndex::Key("connectionUri".into())],
+                                message,
+                            },
+                        ])),
+                    }
+                })?;
+
+        Ok(Arc::new(runtime_configuration))
+    }
+
+    /// Initialize the connector's in-memory state.
+    ///
+    /// For example, any connection pools, prepared queries,
+    /// or other managed resources would be allocated here.
+    ///
+    /// In addition, this function should register any
+    /// connector-specific metrics with the metrics registry.
+    async fn try_init_state(
+        &self,
+        configuration: &<Self::Connector as Connector>::Configuration,
+        metrics: &mut prometheus::Registry,
+    ) -> Result<<Self::Connector as Connector>::State> {
+        state::create_state(
+            configuration,
+            // &configuration.pool_settings,
+            metrics,
+            // configuration.configuration_version_tag,
+        )
+        .instrument(info_span!("Initialise state"))
+        .await
+        .map(Arc::new)
+        .map_err(connector::ErrorResponse::from_error)
+        .map_err(|err| {
+            tracing::error!(
+                meta.signal_type = "log",
+                event.domain = "ndc",
+                event.name = "Initialization error",
+                name = "Initialization error",
+                body = %err,
+                error = true,
+            );
+            err
+        })
+    }
+}
diff --git a/crates/ndc-dynamodb/src/error/convert.rs b/crates/ndc-dynamodb/src/error/convert.rs
new file mode 100644
index 0000000..223f7fb
--- /dev/null
+++ b/crates/ndc-dynamodb/src/error/convert.rs
@@ -0,0 +1,40 @@
+//! Functions to convert between internal error types and the error types exposed by ndc-sdk.
+
+use ndc_sdk::connector::{self, ErrorResponse};
+
+/// Convert an error from [query_engine_execution] to [ErrorResponse].
+pub fn execution_error_to_response(error: query_engine_execution::error::Error) -> ErrorResponse {
+    use query_engine_execution::error::*;
+    match error {
+        Error::Query(query_error) => match &query_error {
+            QueryError::VariableNotFound(_) => {
+                connector::QueryError::new_invalid_request(&query_error.to_string()).into()
+            }
+            QueryError::NotSupported(_) => {
+                connector::QueryError::new_unsupported_operation(&query_error.to_string()).into()
+            }
+            QueryError::DBError(_) => {
+                connector::QueryError::new_unprocessable_content(&query_error.to_string()).into()
+            }
+            QueryError::DBConstraintError(_) | QueryError::MutationConstraintFailed => {
+                connector::MutationError::new_constraint_not_met(&query_error.to_string()).into()
+            }
+        },
+        Error::DB(_) => {
+            ErrorResponse::new_internal_with_details(serde_json::Value::String(error.to_string()))
+        }
+    }
+}
+
+/// Convert an error from [query_engine_translation] to [connector::QueryError].
+pub fn translation_error_to_response(
+    error: &query_engine_translation::translation::error::Error,
+) -> ErrorResponse {
+    use query_engine_translation::translation::error::*;
+    match error {
+        Error::CapabilityNotSupported(_) | Error::NotImplementedYet(_) => {
+            connector::QueryError::new_unsupported_operation(&error.to_string()).into()
+        }
+        _ => connector::QueryError::new_invalid_request(&error.to_string()).into(),
+    }
+}
diff --git a/crates/ndc-dynamodb/src/error/mod.rs b/crates/ndc-dynamodb/src/error/mod.rs
new file mode 100644
index 0000000..9177ba8
--- /dev/null
+++ b/crates/ndc-dynamodb/src/error/mod.rs
@@ -0,0 +1,4 @@
+//! Tools for working with error types.
+
+pub mod convert;
+pub mod record;
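For illustration, this is how the converters in error/convert.rs are meant to be used at an endpoint boundary (the same pattern appears in ndc-dynamodb/src/query.rs below):

    use ndc_dynamodb::error::convert;
    use query_engine_execution::error::{Error, QueryError};

    fn to_response() -> ndc_sdk::connector::ErrorResponse {
        // An unsupported-feature error surfaces as an "unsupported operation"
        // response rather than an opaque internal error.
        let err = Error::Query(QueryError::NotSupported("variables".to_string()));
        convert::execution_error_to_response(err)
    }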
diff --git a/crates/ndc-dynamodb/src/error/record.rs b/crates/ndc-dynamodb/src/error/record.rs
new file mode 100644
index 0000000..82294d6
--- /dev/null
+++ b/crates/ndc-dynamodb/src/error/record.rs
@@ -0,0 +1,45 @@
+//! Record information about errors in traces and metrics.
+
+use query_engine_execution::metrics;
+
+/// Record an execution error in the current trace, and increment a counter.
+pub fn execution_error(error: &query_engine_execution::error::Error, metrics: &metrics::Metrics) {
+    use query_engine_execution::error::*;
+    tracing::error!("{}", error);
+    match error {
+        Error::Query(err) => match &err {
+            QueryError::VariableNotFound(_)
+            | QueryError::DBError(_)
+            | QueryError::MutationConstraintFailed
+            | QueryError::DBConstraintError(_) => {
+                metrics.error_metrics.record_invalid_request();
+            }
+            QueryError::NotSupported(_) => {
+                metrics.error_metrics.record_unsupported_feature();
+            }
+        },
+        Error::DB(_) => {
+            metrics.error_metrics.record_database_error();
+        }
+    }
+}
+
+/// Record a translation error in the current trace, and increment a counter.
+pub fn translation_error(
+    error: &query_engine_translation::translation::error::Error,
+    metrics: &metrics::Metrics,
+) {
+    use query_engine_translation::translation::error::*;
+    tracing::error!("{}", error);
+    match error {
+        Error::CapabilityNotSupported(_) => {
+            metrics.error_metrics.record_unsupported_capability();
+        }
+        Error::NotImplementedYet(_) => {
+            metrics.error_metrics.record_unsupported_feature();
+        }
+        _ => {
+            metrics.error_metrics.record_invalid_request();
+        }
+    }
+}
diff --git a/crates/ndc-dynamodb/src/health.rs b/crates/ndc-dynamodb/src/health.rs
new file mode 100644
index 0000000..d4d5ebd
--- /dev/null
+++ b/crates/ndc-dynamodb/src/health.rs
@@ -0,0 +1,47 @@
+//! Health check endpoint for the connector.
+
+use ndc_sdk::connector::ErrorResponse;
+
+use aws_sdk_dynamodb::Client;
+
+/// Check the health of the connector.
+///
+/// For example, this function should check that the connector
+/// is able to reach its data source over the network.
+/// TODO
+pub async fn health_check(client: &Client) -> Result<(), ErrorResponse> {
+    // Query
+    // let mut rs = client
+    //     .job()
+    //     .query(
+    //         project_id,
+    //         QueryRequest::new("SELECT 1 as count".to_string()),
+    //     )
+    //     .await
+    //     .unwrap();
+
+    let tables_result = client.list_tables().send().await;
+    let tables = tables_result.map_err(|_op| {
+        ndc_dynamodb_configuration::error::ParseConfigurationError::IoErrorButStringified(
+            "Failed to list tables".to_string(),
+        )
+    }); //TODO: handle error
+
+    match tables {
+        Ok(_res) => Ok(()),
+        Err(_e) => Err(ErrorResponse::new_internal_with_details(
+            serde_json::Value::Null,
+        )),
+    }
+
+    // // silly check
+    // let mut count = 0;
+
+    // while rs.next_row() {
+    //     count += 1;
+    // }
+
+    // assert_eq!(count, 1);
+
+    // Ok(())
+}
diff --git a/crates/ndc-dynamodb/src/lib.rs b/crates/ndc-dynamodb/src/lib.rs
new file mode 100644
index 0000000..02a75d4
--- /dev/null
+++ b/crates/ndc-dynamodb/src/lib.rs
@@ -0,0 +1,12 @@
+//! A Hasura v3 DynamoDB Data Connector.
+
+pub mod capabilities;
+pub mod connector;
+pub mod error;
+pub mod health;
+pub mod query;
+pub mod schema;
+pub mod state;
+
+// we expose the sdk used for this connector so that we are able to use it in multitenant
+pub use ndc_sdk as sdk;
diff --git a/crates/ndc-dynamodb/src/main.rs b/crates/ndc-dynamodb/src/main.rs
new file mode 100644
index 0000000..e7a11a9
--- /dev/null
+++ b/crates/ndc-dynamodb/src/main.rs
@@ -0,0 +1,3 @@
+fn main() {
+    println!("Hello, world!");
+}
diff --git a/crates/ndc-dynamodb/src/query.rs b/crates/ndc-dynamodb/src/query.rs
new file mode 100644
index 0000000..7ee02eb
--- /dev/null
+++ b/crates/ndc-dynamodb/src/query.rs
@@ -0,0 +1,87 @@
+//! Implement the `/query` endpoint to run a query against DynamoDB.
+//! See the Hasura
+//! [Native Data Connector Specification](https://hasura.github.io/ndc-spec/specification/queries/index.html)
+//! for further details.
+
+use tracing::{info_span, Instrument};
+
+// pub use explain::explain;
+use ndc_sdk::connector;
+use ndc_sdk::json_response::JsonResponse;
+use ndc_sdk::models;
+use query_engine_sql::sql;
+use query_engine_translation::translation;
+
+// use super::configuration;
+use ndc_dynamodb_configuration as configuration;
+
+use crate::error::convert;
+use crate::error::record;
+use crate::state;
+
+/// Execute a query
+///
+/// This function implements the [query endpoint](https://hasura.github.io/ndc-spec/specification/queries/index.html)
+/// from the NDC specification.
+pub async fn query(
+    configuration: &configuration::Configuration,
+    state: &state::State,
+    query_request: models::QueryRequest,
+) -> Result<JsonResponse<models::QueryResponse>, connector::ErrorResponse> {
+    // let timer = state.query_metrics.time_query_total();
+
+    // See https://docs.rs/tracing/0.1.29/tracing/span/struct.Span.html#in-asynchronous-code
+    async move {
+        tracing::info!(
+            query_request_json = serde_json::to_string(&query_request).unwrap(),
+            query_request = ?query_request
+        );
+
+        let plan = async {
+            plan_query(configuration, state, query_request).map_err(|err| {
+                record::translation_error(&err, &state.metrics);
+                convert::translation_error_to_response(&err)
+            })
+        }
+        .instrument(info_span!("Plan query"))
+        .await?;
+
+        let result = async {
+            execute_query(state, plan).await.map_err(|err| {
+                record::execution_error(&err, &state.metrics);
+                convert::execution_error_to_response(err)
+            })
+        }
+        .instrument(info_span!("Execute query"))
+        .await?;
+
+        state.metrics.record_successful_query();
+        Ok(result)
+    }
+    .instrument(info_span!("/query"))
+    .await
+
+    // timer.complete_with(result)
+}
+
+fn plan_query(
+    configuration: &configuration::Configuration,
+    state: &state::State,
+    query_request: models::QueryRequest,
+) -> Result<sql::execution_plan::ExecutionPlan<sql::execution_plan::Query>, translation::error::Error>
+{
+    let timer = state.metrics.time_query_plan();
+    let result = translation::query::translate(&configuration.metadata, query_request);
+    timer.complete_with(result)
+}
+
+async fn execute_query(
+    state: &state::State,
+    plan: sql::execution_plan::ExecutionPlan<sql::execution_plan::Query>,
+) -> Result<JsonResponse<models::QueryResponse>, query_engine_execution::error::Error> {
+    let timer = state.metrics.time_query_execution();
+    let result = query_engine_execution::query::execute(&state.client, &state.metrics, plan)
+        .await
+        .map(JsonResponse::Serialized);
+    timer.complete_with(result)
+}
diff --git a/crates/ndc-dynamodb/src/schema.rs b/crates/ndc-dynamodb/src/schema.rs
new file mode 100644
index 0000000..50d5db2
--- /dev/null
+++ b/crates/ndc-dynamodb/src/schema.rs
@@ -0,0 +1,178 @@
+//! Implement the `/schema` endpoint to return the connector's schema.
+//! See the Hasura
+//! [Native Data Connector Specification](https://hasura.github.io/ndc-spec/specification/schema/index.html)
+//! for further details.
+
+use std::collections::BTreeMap;
+
+use ndc_sdk::connector;
+use ndc_sdk::models;
+use query_engine_metadata::metadata;
+use query_engine_metadata::metadata::OperatorKind;
+
+use ndc_dynamodb_configuration::configuration;
+
+/// Get the connector's schema.
+///
+/// This function implements the [schema endpoint](https://hasura.github.io/ndc-spec/specification/schema/index.html)
+/// from the NDC specification.
+pub fn get_schema(
+    configuration: &configuration::Configuration,
+) -> Result<models::SchemaResponse, connector::ErrorResponse> {
+    let metadata = &configuration.metadata;
+    let scalar_types: BTreeMap<models::ScalarTypeName, models::ScalarType> = metadata
+        .scalar_types
+        .0
+        .iter()
+        .map(|(scalar_type_name, scalar_type_info)| {
+            let result = models::ScalarType {
+                representation: scalar_type_info
+                    .type_representation
+                    .as_ref()
+                    .map(map_type_representation),
+                aggregate_functions: BTreeMap::new(),
+                comparison_operators: scalar_type_info
+                    .comparison_operators
+                    .iter()
+                    .map(|(op_name, op_def)| {
+                        (
+                            op_name.clone(),
+                            match op_def.operator_kind {
+                                OperatorKind::Equal => models::ComparisonOperatorDefinition::Equal,
+                                OperatorKind::In => models::ComparisonOperatorDefinition::In,
+                                OperatorKind::Custom => {
+                                    models::ComparisonOperatorDefinition::Custom {
+                                        argument_type: models::Type::Named {
+                                            name: op_def.argument_type.as_str().into(),
+                                        },
+                                    }
+                                }
+                            },
+                        )
+                    })
+                    .collect(),
+            };
+            (scalar_type_name.clone(), result)
+        })
+        .collect();
+
+    let collections = metadata
+        .tables
+        .0
+        .iter()
+        .map(|(table_name, table)| models::CollectionInfo {
+            name: table_name.clone(),
+            description: table.description.clone(),
+            arguments: BTreeMap::new(),
+            collection_type: table_name.as_str().into(),
+            uniqueness_constraints: BTreeMap::new(),
+            foreign_keys: BTreeMap::new(),
+        })
+        .collect();
+
+    let object_types = metadata //BTreeMap::from_iter(metadata.tables.0.iter().map(|(table_name, table)| {
+        .tables
+        .0
+        .iter()
+        .map(|(table_name, table)| {
+            let object_type = models::ObjectType {
+                description: table.description.clone(),
+                fields: table
+                    .columns
+                    .iter()
+                    .map(|(column_name, column_info)| {
+                        (
+                            column_name.clone(),
+                            models::ObjectField {
+                                description: column_info.description.clone(),
+                                r#type: column_to_type(column_info),
+                                arguments: BTreeMap::new(),
+                            },
+                        )
+                    })
+                    .collect(),
+            };
+            (table_name.as_str().into(), object_type)
+        })
+        .collect::<BTreeMap<_, _>>();
+
+    Ok(models::SchemaResponse {
+        collections,
+        procedures: vec![],
+        functions: vec![],
+        object_types,
+        scalar_types,
+    })
+}
+
+/// Map our local type representation to ndc-spec type representation.
+#[allow(clippy::match_same_arms)] // merging arms would require changing the order, making this harder to understand
+fn map_type_representation(
+    type_representation: &metadata::TypeRepresentation,
+) -> models::TypeRepresentation {
+    match type_representation {
+        metadata::TypeRepresentation::Boolean => models::TypeRepresentation::Boolean,
+        metadata::TypeRepresentation::String => models::TypeRepresentation::String,
+        metadata::TypeRepresentation::Float32 => models::TypeRepresentation::Float32,
+        metadata::TypeRepresentation::Float64 => models::TypeRepresentation::Float64,
+        metadata::TypeRepresentation::Int16 => models::TypeRepresentation::Int16,
+        metadata::TypeRepresentation::Int32 => models::TypeRepresentation::Int32,
+        // Int64 returns a number.
+        metadata::TypeRepresentation::Int64 => models::TypeRepresentation::JSON,
+        // Int64AsString returns a string.
+        metadata::TypeRepresentation::Int64AsString => models::TypeRepresentation::Int64,
+        // BigDecimal returns a number.
+        metadata::TypeRepresentation::BigDecimal => models::TypeRepresentation::JSON,
+        // BigDecimalAsString returns a string.
+        metadata::TypeRepresentation::BigDecimalAsString => models::TypeRepresentation::BigDecimal,
+        metadata::TypeRepresentation::Timestamp => models::TypeRepresentation::Timestamp,
+        metadata::TypeRepresentation::Timestamptz => models::TypeRepresentation::TimestampTZ,
+        metadata::TypeRepresentation::Time => models::TypeRepresentation::String,
+        metadata::TypeRepresentation::Timetz => models::TypeRepresentation::String,
+        metadata::TypeRepresentation::Date => models::TypeRepresentation::Date,
+        metadata::TypeRepresentation::Geometry => models::TypeRepresentation::Geometry,
+        metadata::TypeRepresentation::Geography => models::TypeRepresentation::Geography,
+        metadata::TypeRepresentation::UUID => models::TypeRepresentation::UUID,
+        metadata::TypeRepresentation::Json => models::TypeRepresentation::JSON,
+        metadata::TypeRepresentation::Enum(variants) => models::TypeRepresentation::Enum {
+            one_of: variants.clone(),
+        },
+    }
+}
+
+// Helper functions for generating ndc-spec schema objects.
+
+// use ndc_sdk::models;
+
+// use query_engine_metadata::metadata;
+
+/// Extract the models::Type representation of a column.
+pub fn column_to_type(column: &metadata::ColumnInfo) -> models::Type {
+    match &column.nullable {
+        metadata::Nullable::NonNullable => type_to_type(&column.r#type),
+        metadata::Nullable::Nullable => models::Type::Nullable {
+            underlying_type: Box::new(type_to_type(&column.r#type)),
+        },
+    }
+}
+
+// /// Extract the models::Type representation of a readonly column.
+// pub fn readonly_column_to_type(column: &metadata::ReadOnlyColumnInfo) -> models::Type {
+//     match &column.nullable {
+//         metadata::Nullable::NonNullable => type_to_type(&column.r#type),
+//         metadata::Nullable::Nullable => models::Type::Nullable {
+//             underlying_type: Box::new(type_to_type(&column.r#type)),
+//         },
+//     }
+// }
+
+pub fn type_to_type(typ: &metadata::Type) -> models::Type {
+    match typ {
+        metadata::Type::ArrayType(typ) => models::Type::Array {
+            element_type: Box::new(type_to_type(typ)),
+        },
+        metadata::Type::ScalarType(scalar_type) => models::Type::Named {
+            name: scalar_type.as_str().into(),
+        },
+    }
+}
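Since introspection marks every column as nullable (see version1.rs above), each object field comes out wrapped. A small sketch of the helpers in action:

    use query_engine_metadata::metadata;

    fn demo() -> ndc_sdk::models::Type {
        let column = metadata::ColumnInfo {
            name: "title".to_string(),
            r#type: metadata::Type::ScalarType(ndc_models::ScalarTypeName::new("String".into())),
            nullable: metadata::Nullable::Nullable,
            description: None,
        };
        // Yields Type::Nullable { underlying_type: Type::Named { name: "String" } }.
        ndc_dynamodb::schema::column_to_type(&column)
    }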
diff --git a/crates/ndc-dynamodb/src/state.rs b/crates/ndc-dynamodb/src/state.rs
new file mode 100644
index 0000000..9992f43
--- /dev/null
+++ b/crates/ndc-dynamodb/src/state.rs
@@ -0,0 +1,60 @@
+//! Transient state used by the connector.
+//!
+//! This is initialized on startup.
+
+use aws_sdk_dynamodb::Config;
+use thiserror::Error;
+use tracing::{info_span, Instrument};
+
+use query_engine_execution::metrics;
+
+/// State for our connector.
+#[derive(Clone)]
+pub struct State {
+    pub metrics: metrics::Metrics,
+    pub client: aws_sdk_dynamodb::Client,
+}
+
+/// Create a DynamoDB client and wrap it inside a connector State.
+pub async fn create_state(
+    configuration: &ndc_dynamodb_configuration::Configuration,
+    metrics_registry: &mut prometheus::Registry,
+) -> Result<State, InitializationError> {
+    let metrics = async {
+        let metrics_inner = metrics::Metrics::initialize(metrics_registry)
+            .map_err(InitializationError::MetricsError)?;
+        Ok(metrics_inner)
+    }
+    .instrument(info_span!("Setup metrics"))
+    .await?;
+
+    let access_key_id = configuration.access_key_id.clone();
+    let secret_access_key = configuration.secret_access_key.clone();
+    let region = configuration.region.clone();
+
+    let credentials = aws_sdk_dynamodb::config::Credentials::new(
+        access_key_id,
+        secret_access_key,
+        None,          // Optional session token
+        None,          // Expiration (None for non-expiring)
+        "my-provider", // Provider name
+    );
+
+    let config = Config::builder()
+        .region(aws_config::Region::new(region))
+        .credentials_provider(credentials)
+        .build();
+
+    let client = aws_sdk_dynamodb::Client::from_conf(config);
+
+    Ok(State { metrics, client })
+}
+
+/// State initialization error.
+#[derive(Debug, Error)]
+pub enum InitializationError {
+    #[error("unable to initialize connection pool: {0}")]
+    UnableToCreatePool(sqlx::Error),
+    #[error("error initializing metrics: {0}")]
+    MetricsError(prometheus::Error),
+}
diff --git a/crates/query-engine/execution/Cargo.toml b/crates/query-engine/execution/Cargo.toml
new file mode 100644
index 0000000..ac0a647
--- /dev/null
+++ b/crates/query-engine/execution/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "query-engine-execution"
+version.workspace = true
+edition.workspace = true
+license.workspace = true
+
+[lints]
+workspace = true
+
+[dependencies]
+ndc-models = { workspace = true }
+
+query-engine-sql = { path = "../sql" }
+
+prometheus = { workspace = true }
+serde_json = { workspace = true }
+sqlformat = { workspace = true }
+sqlx = { workspace = true, features = [ "json", "postgres", "runtime-tokio-rustls", "uuid" ] }
+thiserror = { workspace = true }
+tracing = { workspace = true }
+bytes = { workspace = true }
+aws-config = { workspace = true }
+aws-sdk-dynamodb = { workspace = true }
+base64 = { workspace = true }
diff --git a/crates/query-engine/execution/src/error.rs b/crates/query-engine/execution/src/error.rs
new file mode 100644
index 0000000..a0c5e51
--- /dev/null
+++ b/crates/query-engine/execution/src/error.rs
@@ -0,0 +1,46 @@
+/// Errors
+#[derive(Debug, thiserror::Error)]
+pub enum Error {
+    #[error("{0}")]
+    Query(QueryError),
+    #[error("{0}")]
+    DB(sqlx::Error),
+}
+
+/// Query planning error.
+#[derive(Debug, thiserror::Error)]
+pub enum QueryError {
+    #[error("Variable {0:?} not found.")]
+    VariableNotFound(String),
+    #[error("{0} are not supported.")]
+    NotSupported(String),
+    #[error("{0}")]
+    DBError(sqlx::Error),
+    #[error("{0}")]
+    DBConstraintError(sqlx::Error),
+    #[error("Mutation constraint failed.")]
+    MutationConstraintFailed,
+}
+
+impl From<sqlx::Error> for Error {
+    fn from(err: sqlx::Error) -> Error {
+        match err
+            .as_database_error()
+            .and_then(|e| e.try_downcast_ref())
+            .map(|e: &sqlx::postgres::PgDatabaseError| e.code())
+        {
+            None => Error::DB(err),
+            Some(code) => {
+                // We want to map data and constraint exceptions to query errors
+                // https://www.postgresql.org/docs/current/errcodes-appendix.html
+                if code.starts_with("22") {
+                    Error::Query(QueryError::DBError(err))
+                } else if code.starts_with("23") {
+                    Error::Query(QueryError::DBConstraintError(err))
+                } else {
+                    Error::DB(err)
+                }
+            }
+        }
+    }
+}
diff --git a/crates/query-engine/execution/src/lib.rs b/crates/query-engine/execution/src/lib.rs
new file mode 100644
index 0000000..c76471c
--- /dev/null
+++ b/crates/query-engine/execution/src/lib.rs
@@ -0,0 +1,6 @@
+//! Query execution against the database.
+//! See `/architecture.md#execution` in the repository for more details.
+
+pub mod error;
+pub mod metrics;
+pub mod query;
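This error machinery is inherited from the Postgres-based engine (hence the sqlx types); the `From` impl keys off SQLSTATE class prefixes. An illustrative reduction of the match above:

    fn classify(code: &str) -> &'static str {
        if code.starts_with("22") {
            "Query(DBError)" // class 22: data exceptions, e.g. 22012 division_by_zero
        } else if code.starts_with("23") {
            "Query(DBConstraintError)" // class 23: integrity violations, e.g. 23505 unique_violation
        } else {
            "DB" // anything else stays an internal database error
        }
    }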
diff --git a/crates/query-engine/execution/src/metrics.rs b/crates/query-engine/execution/src/metrics.rs
new file mode 100644
index 0000000..0e388f3
--- /dev/null
+++ b/crates/query-engine/execution/src/metrics.rs
@@ -0,0 +1,314 @@
+//! Metrics setup and update for our connector.
+
+use std::time::Duration;
+
+use prometheus::{Gauge, Histogram, HistogramTimer, IntCounter, IntGauge, Registry};
+
+/// The collection of all metrics exposed through the `/metrics` endpoint.
+#[derive(Debug, Clone)]
+pub struct Metrics {
+    query_total: IntCounter,
+    explain_total: IntCounter,
+    query_plan_time: Histogram,
+    query_execution_time: Histogram,
+    pool_max_connections: IntGauge,
+    pool_min_connections: IntGauge,
+    pool_acquire_timeout: Gauge,
+    pool_max_lifetime: Gauge,
+    pool_idle_timeout: Gauge,
+    pub error_metrics: ErrorMetrics,
+}
+
+impl Metrics {
+    /// Set up counters and gauges used to produce Prometheus metrics
+    pub fn initialize(metrics_registry: &mut Registry) -> Result<Self, prometheus::Error> {
+        let query_total = add_int_counter_metric(
+            metrics_registry,
+            "postgres_ndc_query_total",
+            "Total successful queries.",
+        )?;
+
+        let explain_total = add_int_counter_metric(
+            metrics_registry,
+            "postgres_ndc_explain_total",
+            "Total successful explains.",
+        )?;
+
+        let query_plan_time = add_histogram_metric(
+            metrics_registry,
+            "postgres_ndc_query_plan_time",
+            "Time taken to plan a query for execution, in seconds.",
+        )?;
+
+        let query_execution_time = add_histogram_metric(
+            metrics_registry,
+            "postgres_ndc_query_execution_time",
+            "Time taken to execute an already-planned query, in seconds.",
+        )?;
+
+        let pool_max_connections = add_int_gauge_metric(
+            metrics_registry,
+            "postgres_ndc_pool_max_connections",
+            "The maximum number of connections that this pool should maintain.",
+        )?;
+
+        let pool_min_connections = add_int_gauge_metric(
+            metrics_registry,
+            "postgres_ndc_pool_min_connections",
+            "The minimum number of connections that this pool should maintain.",
+        )?;
+
+        let pool_acquire_timeout = add_gauge_metric(
+            metrics_registry,
+            "postgres_ndc_pool_acquire_timeout",
+            "Get the maximum amount of time to spend waiting for a connection, in seconds.",
+        )?;
+
+        let pool_idle_timeout = add_gauge_metric(
+            metrics_registry,
+            "postgres_ndc_pool_idle_timeout",
+            "Get the maximum idle duration for individual connections, in seconds.",
+        )?;
+
+        let pool_max_lifetime = add_gauge_metric(
+            metrics_registry,
+            "postgres_ndc_pool_max_lifetime",
+            "Get the maximum lifetime of individual connections, in seconds.",
+        )?;
+
+        let error_metrics = ErrorMetrics::initialize(metrics_registry)?;
+
+        Ok(Self {
+            query_total,
+            explain_total,
+            query_plan_time,
+            query_execution_time,
+            pool_max_connections,
+            pool_min_connections,
+            pool_acquire_timeout,
+            pool_max_lifetime,
+            pool_idle_timeout,
+            error_metrics,
+        })
+    }
+
+    pub fn record_successful_query(&self) {
+        self.query_total.inc();
+    }
+
+    pub fn record_successful_explain(&self) {
+        self.explain_total.inc();
+    }
+
+    pub fn time_query_plan(&self) -> Timer {
+        Timer(self.query_plan_time.start_timer())
+    }
+
+    pub fn time_query_execution(&self) -> Timer {
+        Timer(self.query_execution_time.start_timer())
+    }
+
+    // Set the metrics populated from the pool options.
+    //
+    // This only needs to be called once, as the options don't change.
+    pub fn set_pool_options_metrics(&self, pool_options: &sqlx::pool::PoolOptions<sqlx::Postgres>) {
+        let max_connections: i64 = pool_options.get_max_connections().into();
+        self.pool_max_connections.set(max_connections);
+
+        let min_connections: i64 = pool_options.get_min_connections().into();
+        self.pool_min_connections.set(min_connections);
+
+        let acquire_timeout: f64 = pool_options.get_acquire_timeout().as_secs_f64();
+        self.pool_acquire_timeout.set(acquire_timeout);
+
+        // if nothing is set, return 0
+        let idle_timeout: f64 = pool_options
+            .get_idle_timeout()
+            .unwrap_or(Duration::ZERO)
+            .as_secs_f64();
+        self.pool_idle_timeout.set(idle_timeout);
+
+        // if nothing is set, return 0
+        let max_lifetime: f64 = pool_options
+            .get_max_lifetime()
+            .unwrap_or(Duration::ZERO)
+            .as_secs_f64();
+        self.pool_max_lifetime.set(max_lifetime);
+    }
+}
+
+/// Create a new int counter metric and register it with the provided Prometheus Registry
+fn add_int_counter_metric(
+    metrics_registry: &mut Registry,
+    metric_name: &str,
+    metric_description: &str,
+) -> Result<IntCounter, prometheus::Error> {
+    let int_counter =
+        IntCounter::with_opts(prometheus::Opts::new(metric_name, metric_description))?;
+    register_collector(metrics_registry, int_counter)
+}
+
+/// Create a new int gauge metric and register it with the provided Prometheus Registry
+fn add_int_gauge_metric(
+    metrics_registry: &mut Registry,
+    metric_name: &str,
+    metric_description: &str,
+) -> Result<IntGauge, prometheus::Error> {
+    let int_gauge = IntGauge::with_opts(prometheus::Opts::new(metric_name, metric_description))?;
+    register_collector(metrics_registry, int_gauge)
+}
+
+/// Create a new gauge metric and register it with the provided Prometheus Registry
+fn add_gauge_metric(
+    metrics_registry: &mut Registry,
+    metric_name: &str,
+    metric_description: &str,
+) -> Result<Gauge, prometheus::Error> {
+    let gauge = Gauge::with_opts(prometheus::Opts::new(metric_name, metric_description))?;
+    register_collector(metrics_registry, gauge)
+}
+
+/// Create a new histogram metric using the default buckets, and register it with the provided
+/// Prometheus Registry.
+fn add_histogram_metric(
+    metrics_registry: &mut prometheus::Registry,
+    metric_name: &str,
+    metric_description: &str,
+) -> Result<Histogram, prometheus::Error> {
+    let histogram = Histogram::with_opts(prometheus::HistogramOpts::new(
+        metric_name,
+        metric_description,
+    ))?;
+    register_collector(metrics_registry, histogram)
+}
+
+/// Register a new collector with the registry, and returns it for later use.
+fn register_collector<Collector: prometheus::core::Collector + Clone + 'static>(
+    metrics_registry: &mut Registry,
+    collector: Collector,
+) -> Result<Collector, prometheus::Error> {
+    metrics_registry.register(Box::new(collector.clone()))?;
+    Ok(collector)
+}
+
+/// A wrapper around the Prometheus [HistogramTimer] that can make a decision
+/// on whether to record or not based on a result.
+pub struct Timer(HistogramTimer);
+
+impl Timer {
+    /// Stops the timer, recording if the result is `Ok`, and discarding it if
+    /// the result is an `Err`. It returns its input for convenience.
+    pub fn complete_with<T, E>(self, result: Result<T, E>) -> Result<T, E> {
+        match result {
+            Ok(_) => {
+                self.0.stop_and_record();
+            }
+            Err(_) => {
+                self.0.stop_and_discard();
+            }
+        };
+        result
+    }
+}
+
+// /// A wrapper around the internal Prometheus error type to avoid exposing more
+// /// than we need.
+// #[derive(Debug)]
+// pub struct Error(prometheus::Error);
+
+// impl std::fmt::Display for Error {
+//     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+//         self.0.fmt(f)
+//     }
+// }
+
+// impl std::error::Error for Error {}
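Usage sketch for the timer wrapper: wrap a fallible phase so that only successful runs contribute histogram observations (this mirrors plan_query in ndc-dynamodb/src/query.rs above):

    fn timed_plan<T, E>(metrics: &Metrics, run: impl FnOnce() -> Result<T, E>) -> Result<T, E> {
        let timer = metrics.time_query_plan();
        let result = run();
        // Ok => stop_and_record() adds a sample; Err => stop_and_discard().
        timer.complete_with(result)
    }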
+#[derive(Debug, Clone)]
+pub struct ErrorMetrics {
+    /// the connector received an invalid request.
+    invalid_request_total: IntCounter,
+    /// the connector received a request using capabilities it does not support.
+    unsupported_capability_total: IntCounter,
+    /// the connector could not fulfill a request because it does not support
+    /// certain features (which are not described as capabilities).
+    unsupported_feature_total: IntCounter,
+    /// the connector had an internal error.
+    connector_error_total: IntCounter,
+    /// the database emitted an error.
+    database_error_total: IntCounter,
+    /// we failed to acquire a database connection from the pool
+    connection_acquisition_error_total: IntCounter,
+}
+
+impl ErrorMetrics {
+    /// Set up counters and gauges used to produce Prometheus metrics
+    pub fn initialize(
+        metrics_registry: &mut prometheus::Registry,
+    ) -> Result<Self, prometheus::Error> {
+        let invalid_request_total = add_int_counter_metric(
+            metrics_registry,
+            "ndc_postgres_error_invalid_request_total_count",
+            "Total number of invalid requests encountered.",
+        )?;
+
+        let unsupported_capability_total = add_int_counter_metric(
+            metrics_registry,
+            "ndc_postgres_error_unsupported_capability_total_count",
+            "Total number of invalid requests with unsupported capabilities encountered.",
+        )?;
+
+        let unsupported_feature_total = add_int_counter_metric(
+            metrics_registry,
+            "ndc_postgres_error_unsupported_feature_total_count",
+            "Total number of invalid requests with unsupported features encountered.",
+        )?;
+
+        let connector_error_total = add_int_counter_metric(
+            metrics_registry,
+            "ndc_postgres_error_connector_error_total_count",
+            "Total number of requests failed due to an internal connector error.",
+        )?;
+
+        let database_error_total = add_int_counter_metric(
+            metrics_registry,
+            "ndc_postgres_error_database_error_total_count",
+            "Total number of requests failed due to a database error.",
+        )?;
+
+        let connection_acquisition_error_total = add_int_counter_metric(
+            metrics_registry,
+            "ndc_postgres_error_connection_acquisition_error_total_count",
+            "Total number of failures to acquire a database connection.",
+        )?;
+
+        Ok(ErrorMetrics {
+            invalid_request_total,
+            unsupported_capability_total,
+            unsupported_feature_total,
+            connector_error_total,
+            database_error_total,
+            connection_acquisition_error_total,
+        })
+    }
+
+    pub fn record_invalid_request(&self) {
+        self.invalid_request_total.inc();
+    }
+    pub fn record_unsupported_capability(&self) {
+        self.unsupported_capability_total.inc();
+    }
+    pub fn record_unsupported_feature(&self) {
+        self.unsupported_feature_total.inc();
+    }
+    pub fn record_connector_error(&self) {
+        self.connector_error_total.inc();
+    }
+    pub fn record_database_error(&self) {
+        self.database_error_total.inc();
+    }
+    pub fn record_connection_acquisition_error(&self) {
+        self.connection_acquisition_error_total.inc();
+    }
+}
diff --git a/crates/query-engine/execution/src/query.rs b/crates/query-engine/execution/src/query.rs
new file mode 100644
index 0000000..d54b491
--- /dev/null
+++ b/crates/query-engine/execution/src/query.rs
@@ -0,0 +1,129 @@
+//! Execute an execution plan against the database.
+
+use std::{collections::HashMap, vec};
+
+use crate::error::Error;
+use crate::metrics;
+use aws_sdk_dynamodb::Client;
+use base64;
+use bytes::{BufMut, Bytes, BytesMut};
+use serde_json::{self, to_string, Value};
+
+use query_engine_sql::sql;
+
+/// Execute a query against DynamoDB.
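+///
+/// A hypothetical invocation, for illustration only (client construction uses
+/// the stock AWS SDK loaders; `plan` and `metrics` come from the surrounding
+/// connector code):
+///
+/// ```rust,ignore
+/// let config = aws_config::load_from_env().await;
+/// let client = aws_sdk_dynamodb::Client::new(&config);
+/// let bytes = execute(&client, &metrics, plan).await?;
+/// // `bytes` holds the JSON-encoded rowset to hand back to the caller.
+/// ```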
+#[allow(clippy::cast_possible_wrap)]
+pub async fn execute(
+    client: &Client,
+    _metrics: &metrics::Metrics,
+    plan: sql::execution_plan::ExecutionPlan,
+) -> Result<Bytes, Error> {
+    let mut buffer = BytesMut::new();
+
+    // Run the query on each set of variables. The result is a vector of rows;
+    // each element in the vector is the result of running the query on one set
+    // of variables.
+    match plan.query.variables {
+        None => {
+            let query_request = plan.query.query_sql().sql;
+            let query_limit: Option<i32> = plan.query.limit.map(|limit| limit as i32);
+
+            // Query
+            let rs = client
+                .execute_statement()
+                .statement(query_request.to_string())
+                .set_parameters(None)
+                .set_limit(query_limit)
+                .send()
+                .await
+                .unwrap();
+
+            let mut res_map: Vec<HashMap<String, String>> = vec![];
+
+            for item in &rs.items.unwrap() {
+                let mut hashmap = HashMap::new();
+                for (key, attribute_value) in item.clone() {
+                    if attribute_value.is_s() {
+                        let s = attribute_value.as_s().unwrap().to_string();
+                        println!("String: {s}");
+                        hashmap.insert(key, s);
+                    } else if attribute_value.is_n() {
+                        let n = attribute_value.as_n().unwrap().to_string();
+                        println!("Number: {n}");
+                        hashmap.insert(key, n);
+                    } else if attribute_value.is_bool() {
+                        let bool = attribute_value.as_bool().unwrap();
+                        let bool_str = bool.to_string();
+                        hashmap.insert(key, bool_str);
+                    } else if attribute_value.is_null() {
+                        hashmap.insert(key, "null".to_string());
+                    } else if attribute_value.is_b() {
+                        let b = attribute_value.as_b().unwrap();
+                        let b_str = base64::Engine::encode(
+                            &base64::engine::general_purpose::STANDARD,
+                            b.as_ref(),
+                        );
+                        hashmap.insert(key, b_str);
+                    } else if attribute_value.is_ss() {
+                        let ss = attribute_value.as_ss().unwrap();
+                        let ss_str = ss.join(", ");
+                        hashmap.insert(key, ss_str);
+                    } else if attribute_value.is_ns() {
+                        let ns = attribute_value.as_ns().unwrap();
+                        let ns_str = ns.join(", ");
+                        hashmap.insert(key, ns_str);
+                    } else if attribute_value.is_bs() {
+                        let bs = attribute_value.as_bs().unwrap();
+                        let bs_str = bs
+                            .iter()
+                            .map(|b| {
+                                base64::Engine::encode(
+                                    &base64::engine::general_purpose::STANDARD,
+                                    b.as_ref(),
+                                )
+                            })
+                            .collect::<Vec<String>>()
+                            .join(", ");
+                        hashmap.insert(key, bs_str);
+                    } else if attribute_value.is_m() {
+                        let m = attribute_value.as_m().unwrap();
+                        let mut m_str = "{".to_string();
+                        for (k, v) in m {
+                            m_str.push_str(&format!("{k}: {v:?}, "));
+                        }
+                        m_str.push('}');
+                        hashmap.insert(key, m_str);
+                    } else if attribute_value.is_l() {
+                        let l = attribute_value.as_l().unwrap();
+                        let mut l_str = "[".to_string();
+                        for v in l {
+                            l_str.push_str(&format!("{v:?}, "));
+                        }
+                        l_str.push(']');
+                        hashmap.insert(key, l_str);
+                    } else {
+                        println!("Unknown");
+                    }
+                }
+                res_map.push(hashmap);
+            }
+
+            let mut rows: HashMap<String, Vec<HashMap<String, String>>> = HashMap::new();
+            rows.insert("rows".into(), res_map);
+
+            let rows_stringified = serde_json::to_string(&rows).unwrap();
+
+            let row_value: Value = serde_json::from_str(&rows_stringified).unwrap();
+
+            let row_value_array = Value::Array(vec![row_value]);
+            let final_row = to_string(&row_value_array).unwrap();
+
+            let b: Bytes = Bytes::from(final_row);
+            buffer.put(b);
+        }
+        Some(_variable_sets) => {
+            todo!("foreach/variables not implemented in query engine / execution")
+        }
+    };
+
+    Ok(buffer.freeze())
+}
diff --git a/crates/query-engine/metadata/Cargo.toml b/crates/query-engine/metadata/Cargo.toml
new file mode 100644
index 0000000..2ff8f4f
--- /dev/null
+++ b/crates/query-engine/metadata/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "query-engine-metadata"
+version.workspace = true
+edition.workspace = true
+license.workspace = true
+
+[lints]
+workspace = true
+
+[dependencies]
+ndc-models = { workspace = true }
+smol_str = { workspace = true }
+serde = { workspace = true }
+schemars = { workspace = true, features = ["smol_str"] }
diff --git a/crates/query-engine/metadata/src/lib.rs b/crates/query-engine/metadata/src/lib.rs
new file mode 100644
index 0000000..d064e80
--- /dev/null
+++ b/crates/query-engine/metadata/src/lib.rs
@@ -0,0 +1,3 @@
+//! This crate contains metadata information regarding the database and tracked information.
+
+pub mod metadata;
diff --git a/crates/query-engine/metadata/src/metadata/database.rs b/crates/query-engine/metadata/src/metadata/database.rs
new file mode 100644
index 0000000..5a76fb5
--- /dev/null
+++ b/crates/query-engine/metadata/src/metadata/database.rs
@@ -0,0 +1,286 @@
+//! Metadata information regarding the database and tracked information.
+
+use models::ScalarTypeName;
+use ndc_models as models;
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+use std::collections::{BTreeMap, BTreeSet};
+
+/// The scalar types supported by the Engine.
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct ScalarTypeTypeName(pub String);
+
+/// The type of values that a column, field, or argument may take.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub enum Type {
+    ScalarType(models::ScalarTypeName),
+    ArrayType(Box<Type>),
+}
+
+/// Map of all known/occurring scalar types.
+#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct ScalarTypes(pub BTreeMap<models::ScalarTypeName, ScalarType>);
+
+impl ScalarTypes {
+    pub fn empty() -> Self {
+        ScalarTypes(BTreeMap::new())
+    }
+}
+
+/// Information about a scalar type. A scalar type is completely characterized by its name and the
+/// operations you can do on it.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct ScalarType {
+    pub type_name: ScalarTypeName,
+    // pub schema_name: Option<String>, // TODO(PY): see what happens when option is removed
+    pub description: Option<String>,
+    pub comparison_operators: BTreeMap<models::ComparisonOperatorName, ComparisonOperator>,
+    pub type_representation: Option<TypeRepresentation>,
+}
+
+/// The complete list of supported binary operators for scalar types.
+/// Not all of these are supported for every type.
+#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct ComparisonOperators(
+    pub BTreeMap<
+        models::ScalarTypeName,
+        BTreeMap<models::ComparisonOperatorName, ComparisonOperator>,
+    >,
+);
+
+impl ComparisonOperators {
+    pub fn empty() -> Self {
+        ComparisonOperators(BTreeMap::new())
+    }
+}
+
+/// Information about a composite type field.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+pub struct FieldInfo {
+    pub field_name: String,
+    pub r#type: Type,
+    pub description: Option<String>,
+}
+
+/// Represents a postgres binary comparison operator
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct ComparisonOperator {
+    pub operator_name: String,
+    pub operator_kind: OperatorKind,
+    pub argument_type: models::ScalarTypeName,
+    pub is_infix: bool,
+}
+
+/// Is it a built-in operator, or a custom operator.
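+///
+/// For example, a plausible equality entry (a sketch; the operator and scalar
+/// type names here are illustrative):
+///
+/// ```rust,ignore
+/// let equals = ComparisonOperator {
+///     operator_name: "=".to_string(),
+///     operator_kind: OperatorKind::Equal,
+///     argument_type: "String".into(),
+///     is_infix: true,
+/// };
+/// ```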
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub enum OperatorKind {
+    Equal,
+    In,
+    Custom,
+}
+
+/// Mapping from a "table" name to its information.
+#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct TablesInfo(pub BTreeMap<models::CollectionName, TableInfo>);
+
+impl TablesInfo {
+    pub fn empty() -> Self {
+        TablesInfo(BTreeMap::new())
+    }
+
+    pub fn merge(&mut self, other: TablesInfo) {
+        for (table_name, table_info) in other.0 {
+            self.0.insert(table_name, table_info);
+        }
+    }
+}
+
+/// Information about a database table (or any other kind of relation).
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct TableInfo {
+    // pub schema_name: String,
+    pub table_name: String,
+    pub columns: BTreeMap<models::FieldName, ColumnInfo>,
+    pub partition_key: String,
+    pub sort_key: String,
+
+    // pub uniqueness_constraints: UniquenessConstraints,
+    pub gsi: GlobalSecondaryIndexes,
+
+    pub description: Option<String>,
+}
+
+/// Can this column contain null values
+#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub enum Nullable {
+    #[default]
+    Nullable,
+    NonNullable,
+}
+
+/// Does this column have a default value.
+#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub enum HasDefault {
+    #[default]
+    NoDefault,
+    HasDefault,
+}
+
+/// Is this column an identity column.
+#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub enum IsIdentity {
+    #[default]
+    NotIdentity,
+    IdentityByDefault,
+    IdentityAlways,
+}
+
+/// Is this column a generated column.
+#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub enum IsGenerated {
+    #[default]
+    NotGenerated,
+    Stored,
+}
+
+/// Information about a database column.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct ColumnInfo {
+    pub name: String,
+    pub r#type: Type,
+    #[serde(default)]
+    pub nullable: Nullable,
+    #[serde(default)]
+    pub description: Option<String>,
+}
+
+/// A mapping from the name of a unique constraint to its value.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct UniquenessConstraints(pub BTreeMap<String, UniquenessConstraint>);
+
+/// The set of columns that make up a uniqueness constraint.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+pub struct UniquenessConstraint(pub BTreeSet<models::FieldName>);
+
+/// A mapping from the name of a foreign key constraint to its value.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct ForeignRelations(pub BTreeMap<String, ForeignRelation>);
+
+/// A foreign key constraint.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+pub struct ForeignRelation {
+    pub foreign_schema: Option<String>,
+    pub foreign_table: String,
+    pub column_mapping: BTreeMap<models::FieldName, models::FieldName>,
+}
+
+/// A mapping from the name of a global secondary index to its value.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct GlobalSecondaryIndexes(pub BTreeMap<String, GlobalSecondaryIndexInfo>);
+
+/// A Global Secondary Index.
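+///
+/// For example, a hypothetical index over an `ArtistName` partition key that
+/// projects one extra non-key attribute (the attribute names are illustrative;
+/// `INCLUDE` is one of DynamoDB's projection types, alongside `ALL` and
+/// `KEYS_ONLY`):
+///
+/// ```rust,ignore
+/// let gsi = GlobalSecondaryIndexInfo {
+///     partition_key: "ArtistName".to_string(),
+///     sort_key: Some("AlbumTitle".to_string()),
+///     projection_type: ProjectionTypeInfo {
+///         projection_type: "INCLUDE".to_string(),
+///         non_key_attributes: vec!["Genre".to_string()],
+///     },
+/// };
+/// ```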
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+pub struct GlobalSecondaryIndexInfo {
+    pub partition_key: String,
+    pub sort_key: Option<String>,
+    pub projection_type: ProjectionTypeInfo,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+pub struct ProjectionTypeInfo {
+    pub projection_type: String,
+    pub non_key_attributes: Vec<String>,
+}
+
+/// All supported aggregate functions, grouped by type.
+#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize, JsonSchema)]
+pub struct AggregateFunctions(
+    pub BTreeMap<
+        models::ScalarTypeName,
+        BTreeMap<models::AggregateFunctionName, AggregateFunction>,
+    >,
+);
+
+impl AggregateFunctions {
+    pub fn empty() -> Self {
+        AggregateFunctions(BTreeMap::new())
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct AggregateFunction {
+    pub return_type: models::TypeName,
+}
+
+/// Type representation of scalar types, grouped by type.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct TypeRepresentations(pub BTreeMap<models::ScalarTypeName, TypeRepresentation>);
+
+impl TypeRepresentations {
+    pub fn empty() -> Self {
+        TypeRepresentations(BTreeMap::new())
+    }
+}
+
+/// Type representation of a scalar type.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub enum TypeRepresentation {
+    /// JSON booleans
+    Boolean,
+    /// Any JSON string
+    String,
+    /// float4
+    Float32,
+    /// float8
+    Float64,
+    /// int2
+    Int16,
+    /// int4
+    Int32,
+    /// int8 as integer
+    Int64,
+    /// int8 as string
+    Int64AsString,
+    /// numeric
+    BigDecimal,
+    /// numeric as string
+    BigDecimalAsString,
+    /// timestamp
+    Timestamp,
+    /// timestamp with timezone
+    Timestamptz,
+    /// time
+    Time,
+    /// time with timezone
+    Timetz,
+    /// date
+    Date,
+    /// uuid
+    UUID,
+    /// geography
+    Geography,
+    /// geometry
+    Geometry,
+    /// An arbitrary json.
+    Json,
+    /// One of the specified string values
+    Enum(Vec<String>),
+}
diff --git a/crates/query-engine/metadata/src/metadata/mod.rs b/crates/query-engine/metadata/src/metadata/mod.rs
new file mode 100644
index 0000000..17b39e8
--- /dev/null
+++ b/crates/query-engine/metadata/src/metadata/mod.rs
@@ -0,0 +1,25 @@
+//! Metadata information regarding the database and tracked information.
+
+pub mod database;
+
+// re-export without modules
+pub use database::*;
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+
+/// Metadata information.
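+///
+/// For example, building an empty value and merging table information into it
+/// (a sketch; `some_tables` stands for whatever introspection produced):
+///
+/// ```rust,ignore
+/// let mut metadata = Metadata::empty();
+/// metadata.tables.merge(some_tables);
+/// ```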
+#[derive(Clone, PartialEq, Eq, Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct Metadata {
+    pub tables: TablesInfo,
+    pub scalar_types: ScalarTypes,
+}
+
+impl Metadata {
+    pub fn empty() -> Self {
+        Metadata {
+            tables: TablesInfo::empty(),
+            scalar_types: ScalarTypes::empty(),
+        }
+    }
+}
diff --git a/crates/query-engine/sql/Cargo.toml b/crates/query-engine/sql/Cargo.toml
new file mode 100644
index 0000000..8329a00
--- /dev/null
+++ b/crates/query-engine/sql/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "query-engine-sql"
+version.workspace = true
+edition.workspace = true
+license.workspace = true
+
+[lints]
+workspace = true
+
+[dependencies]
+ndc-models = { workspace = true }
+
+smol_str = { workspace = true }
+serde = { workspace = true }
+serde_json = { workspace = true }
+schemars = { workspace = true, features = ["smol_str"] }
diff --git a/crates/query-engine/sql/src/lib.rs b/crates/query-engine/sql/src/lib.rs
new file mode 100644
index 0000000..44f4127
--- /dev/null
+++ b/crates/query-engine/sql/src/lib.rs
@@ -0,0 +1,4 @@
+//! This crate contains a subset of PostgreSQL AST and prettyprinter.
+//! See `/architecture.md#sql-ast` in the repository for more details.
+
+pub mod sql;
diff --git a/crates/query-engine/sql/src/sql/ast.rs b/crates/query-engine/sql/src/sql/ast.rs
new file mode 100644
index 0000000..470540a
--- /dev/null
+++ b/crates/query-engine/sql/src/sql/ast.rs
@@ -0,0 +1,454 @@
+//! Type definitions of a SQL AST representation.
+
+use std::collections::BTreeMap;
+
+/// An EXPLAIN clause
+#[derive(Debug, Clone, PartialEq)]
+pub enum Explain<'a> {
+    Select(&'a Select),
+}
+
+/// A WITH clause
+#[derive(Debug, Clone, PartialEq)]
+pub struct With {
+    pub common_table_expressions: Vec<CommonTableExpression>,
+}
+
+/// A single Common Table Expression
+#[derive(Debug, Clone, PartialEq)]
+pub struct CommonTableExpression {
+    pub alias: TableAlias,
+    pub column_names: Option<Vec<ColumnAlias>>,
+    pub select: CTExpr,
+}
+
+/// The 'body' side of a Common Table Expression
+#[derive(Debug, Clone, PartialEq)]
+pub enum CTExpr {
+    Select(Select),
+    RawSql(Vec<RawSql>),
+    // Delete(Delete),
+    // Insert(Insert),
+    // Update(Update),
+}
+
+/// Raw SQL written by a user which is opaque to us
+#[derive(Debug, Clone, PartialEq)]
+pub enum RawSql {
+    /// Raw SQL text
+    RawText(String),
+    /// An expression
+    Expression(Expression),
+}
+
+/// A SELECT clause
+#[derive(Debug, Clone, PartialEq)]
+pub struct Select {
+    pub with: With,
+    pub select_list: SelectList,
+    pub from: Option<From>,
+    // pub joins: Vec<Join>,
+    pub where_: Where,
+    // pub group_by: GroupBy,
+    pub order_by: OrderBy,
+    // pub limit: Limit, //remove limit
+}
+
+/// An INSERT clause
+#[derive(Debug, Clone, PartialEq)]
+pub struct Insert {
+    pub schema: SchemaName,
+    pub table: TableName,
+    pub columns: Option<Vec<ColumnName>>,
+    pub from: InsertFrom,
+    pub returning: Returning,
+}
+
+/// Source from which values would be inserted.
+#[derive(Debug, Clone, PartialEq)]
+pub enum InsertFrom {
+    Values(Vec<Vec<MutationValueExpression>>),
+    Select(Select),
+}
+
+/// An expression inside an INSERT VALUES clause or UPDATE SET clause.
+#[derive(Debug, Clone, PartialEq)]
+pub enum MutationValueExpression {
+    Default,
+    Expression(Expression),
+}
+
+/// A DELETE clause
+#[derive(Debug, Clone, PartialEq)]
+pub struct Delete {
+    pub from: From,
+    pub where_: Where,
+    pub returning: Returning,
+}
+
+/// An UPDATE clause
+#[derive(Debug, Clone, PartialEq)]
+pub struct Update {
+    pub schema: SchemaName,
+    pub table: TableName,
+    pub set: BTreeMap<ColumnName, MutationValueExpression>,
+    pub where_: Where,
+    pub returning: Returning,
+}
+
+/// a RETURNING clause
+#[derive(Debug, Clone, PartialEq)]
+pub struct Returning(pub SelectList);
+
+/// A select list
+#[derive(Debug, Clone, PartialEq)]
+pub enum SelectList {
+    SelectList(Vec<(ColumnAlias, Expression)>),
+    SelectStar,
+    SelectStarFrom(TableReference),
+    Select1,
+}
+
+/// A FROM clause
+#[derive(Debug, Clone, PartialEq)]
+pub enum From {
+    /// Select from a table reference
+    Table {
+        reference: TableReference,
+        alias: TableAlias,
+    },
+    /// Select from a subquery
+    Select {
+        select: Box<Select>,
+        alias: TableAlias,
+    },
+    /// Select from a jsonb value as a set of records
+    JsonbToRecordset {
+        expression: Expression,
+        alias: TableAlias,
+        columns: Vec<(ColumnName, ScalarType)>,
+    },
+    /// Select from the elements of a jsonb array
+    JsonbArrayElements {
+        expression: Expression,
+        alias: TableAlias,
+        column: ColumnAlias,
+    },
+    /// Select from an unnested array
+    Unnest {
+        expression: Expression,
+        alias: TableAlias,
+        column: ColumnAlias,
+    },
+}
+
+/// A JOIN clause
+#[derive(Debug, Clone, PartialEq)]
+pub enum Join {
+    /// LEFT OUTER JOIN
+    LeftOuterJoin(LeftOuterJoin),
+    /// INNER JOIN
+    InnerJoin(InnerJoin),
+    /// CROSS JOIN
+    CrossJoin(CrossJoin),
+    /// FULL OUTER JOIN
+    FullOuterJoin(FullOuterJoin),
+}
+
+impl Join {
+    pub fn get_select_and_alias(self) -> (Box<Select>, TableAlias) {
+        match self {
+            Join::LeftOuterJoin(LeftOuterJoin { select, alias, .. })
+            | Join::InnerJoin(InnerJoin { select, alias })
+            | Join::CrossJoin(CrossJoin { select, alias })
+            | Join::FullOuterJoin(FullOuterJoin { select, alias }) => (select, alias),
+        }
+    }
+}
+
+/// A LEFT OUTER JOIN clause
+#[derive(Debug, Clone, PartialEq)]
+pub struct LeftOuterJoin {
+    pub select: Box<Select>,
+    pub alias: TableAlias,
+    pub on: Expression,
+}
+
+/// A CROSS JOIN clause
+#[derive(Debug, Clone, PartialEq)]
+pub struct CrossJoin {
+    pub select: Box<Select>,
+    pub alias: TableAlias,
+}
+
+/// An INNER JOIN clause
+#[derive(Debug, Clone, PartialEq)]
+pub struct InnerJoin {
+    pub select: Box<Select>,
+    pub alias: TableAlias,
+}
+
+/// A FULL OUTER JOIN clause
+#[derive(Debug, Clone, PartialEq)]
+pub struct FullOuterJoin {
+    pub select: Box<Select>,
+    pub alias: TableAlias,
+}
+
+/// A WHERE clause
+#[derive(Debug, Clone, PartialEq)]
+pub struct Where(pub Expression);
+
+/// A GROUP BY clause, currently not in use
+#[derive(Debug, Clone, PartialEq)]
+pub struct GroupBy {}
+
+/// An ORDER BY clause
+#[derive(Debug, Clone, PartialEq)]
+pub struct OrderBy {
+    pub elements: Vec<OrderByElement>,
+}
+
+// todo: should we also include option for specifying NULLS FIRST | NULLS LAST
+/// A single element in an ORDER BY clause
+#[derive(Debug, Clone, PartialEq)]
+pub struct OrderByElement {
+    pub target: Expression,
+    pub direction: OrderByDirection,
+}
+
+/// A direction for a single ORDER BY element
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum OrderByDirection {
+    Asc,
+    Desc,
+}
+
+/// LIMIT and OFFSET clauses
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Limit {
+    pub limit: Option<u32>,
+    pub offset: Option<u32>,
+}
+
+/// A scalar expression
+#[derive(Debug, Clone, PartialEq)]
+pub enum Expression {
+    /// AND clause
+    And {
+        left: Box<Expression>,
+        right: Box<Expression>,
+    },
+    /// OR clause
+    Or {
+        left: Box<Expression>,
+        right: Box<Expression>,
+    },
+    /// NOT clause
+    Not(Box<Expression>),
+    /// A binary operation on two scalar expression
+    BinaryOperation {
+        left: Box<Expression>,
+        operator: BinaryOperator,
+        right: Box<Expression>,
+    },
+    /// A binary operation on a scalar expression and an array of scalar expressions
+    BinaryArrayOperation {
+        left: Box<Expression>,
+        operator: BinaryArrayOperator,
+        right: Vec<Expression>,
+    },
+    /// An unary operation on a scalar expression
+    UnaryOperation {
+        expression: Box<Expression>,
+        operator: UnaryOperator,
+    },
+    /// A scalar function call
+    FunctionCall {
+        function: Function,
+        args: Vec<Expression>,
+    },
+    /// An EXISTS clause
+    Exists {
+        select: Box<Select>,
+    },
+    /// A json object built from (label, expression) pairs
+    JsonBuildObject(BTreeMap<String, Expression>),
+    /// Convert a row to json
+    RowToJson(TableReference),
+    /// A cast of an expression to another type
+    Cast {
+        expression: Box<Expression>,
+        r#type: ScalarType,
+    },
+    /// An array constructed from a list of expressions
+    ArrayConstructor(Vec<Expression>),
+    /// A reference to a column
+    ColumnReference(ColumnReference),
+    /// A reference to a table
+    TableReference(TableReference),
+    /// A literal value
+    Value(Value),
+    /// A COUNT clause
+    Count(CountType),
+    /// A correlated subquery
+    CorrelatedSubSelect(Box<Select>),
+    NestedFieldSelect {
+        expression: Box<Expression>,
+        nested_field: NestedField,
+    },
+    JoinExpressions(Vec<Expression>),
+    SafeOffSet {
+        offset: i32,
+    },
+    // JsonQuery(Box<Expression>, JsonPath), // JSON_QUERY([album].[json], '$.title') for multiple
+    // // values
+    // JsonValue(Box<Expression>, JsonPath), // JSON_VALUE([album].[json], '$.title') for single values
+}
+
+// // JSON selector path for expressing '$.user.name'
+// #[derive(Debug, Clone, PartialEq, Eq)]
+// pub struct JsonPath {
+//     pub elements: Vec<ColumnAlias>,
+// }
+
+/// Represents the name of a field in a nested object.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct NestedField(pub String);
+
+/// An unary operator
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum UnaryOperator {
+    IsNull,
+}
+
+/// Represents the name of a binary operator.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct BinaryOperator(pub String);
+
+/// A binary operator when the rhs is an array
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum BinaryArrayOperator {
+    In,
+}
+
+/// A scalar function
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum Function {
+    Coalesce,
+    JsonAgg,
+    JsonbPopulateRecord,
+    ArrayAgg,
+    Unnest,
+    Unknown(String),
+    SafeOffSet(String),
+}
+
+/// COUNT clause
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum CountType {
+    Star,
+    Simple(ColumnReference),
+    Distinct(ColumnReference),
+}
+
+/// Value
+#[derive(Debug, Clone, PartialEq)]
+pub enum Value {
+    Int8(i32),
+    Float8(f64),
+    Bool(bool),
+    Character(String),
+    String(String),
+    Null,
+    Array(Vec<Value>),
+    JsonValue(serde_json::Value),
+    EmptyJsonArray,
+    Variable(String),
+}
+
+/// Scalar type
+#[derive(Debug, Clone, PartialEq)]
+pub enum ScalarType {
+    BaseType(ScalarTypeName),
+    ArrayType(ScalarTypeName),
+}
+
+/// Scalar type name. This will always be output as a quoted identifier.
+#[derive(Debug, Clone, PartialEq)]
+pub enum ScalarTypeName {
+    /// A type name referencing a schema.
+    Qualified {
+        schema_name: SchemaName,
+        type_name: String,
+    },
+    /// A type name without a schema.
+    Unqualified(String),
+}
+
+/// A database schema name
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SchemaName(pub String);
+
+/// A database table name
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TableName(pub String);
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Gsi(pub String);
+
+/// A reference to a table. Used when we want to query it,
+/// for example in a FROM clause.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum TableReference {
+    /// refers to a db table object name
+    DBTable {
+        // schema: SchemaName,
+        table: TableName,
+        gsi: Option<Gsi>,
+    },
+    /// refers to an alias we created
+    AliasedTable(TableAlias),
+}
+
+/// A database table's column name
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ColumnName(pub String);
+
+/// A reference to a column. Used when we want to query it,
+/// for example in a SELECT list.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ColumnReference {
+    /// refers to a db column object name
+    TableColumn {
+        table: TableReference,
+        name: ColumnName,
+    },
+    /// refers to an alias we created
+    AliasedColumn {
+        table: TableReference,
+        column: ColumnAlias,
+    },
+}
+
+/// aliases that we give to relations
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TableAlias {
+    pub unique_index: u64,
+    pub name: String,
+}
+
+/// aliases that we give to columns
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ColumnAlias {
+    pub name: String,
+}
+
+#[derive(Debug, Clone)]
+/// Whether this rows query returns fields or not.
+pub enum ReturnsFields {
+    FieldsWereRequested,
+    NoFieldsWereRequested,
+}
diff --git a/crates/query-engine/sql/src/sql/convert.rs b/crates/query-engine/sql/src/sql/convert.rs
new file mode 100644
index 0000000..84790e8
--- /dev/null
+++ b/crates/query-engine/sql/src/sql/convert.rs
@@ -0,0 +1,816 @@
+//! Convert a SQL AST to a low-level SQL string.
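+//!
+//! For example, a sketch of rendering a minimal `SELECT * FROM "Music"`
+//! (the table name and alias are illustrative):
+//!
+//! ```rust,ignore
+//! use query_engine_sql::sql::{ast, helpers, string};
+//!
+//! let select = helpers::star_select(ast::From::Table {
+//!     reference: ast::TableReference::DBTable {
+//!         table: ast::TableName("Music".to_string()),
+//!         gsi: None,
+//!     },
+//!     alias: ast::TableAlias { unique_index: 0, name: "music".to_string() },
+//! });
+//! let mut sql = string::SQL::new();
+//! select.to_sql(&mut sql);
+//! // `sql.sql` now holds the rendered statement.
+//! ```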
+ +use super::ast::*; +use super::helpers; +use super::string::*; + +// Convert to SQL strings + +impl With { + pub fn to_sql(&self, sql: &mut SQL) { + if self.common_table_expressions.is_empty() { + } else { + sql.append_syntax("WITH "); + + let ctes = &self.common_table_expressions; + for (index, cte) in ctes.iter().enumerate() { + cte.to_sql(sql); + if index < (ctes.len() - 1) { + sql.append_syntax(", "); + } + } + } + } +} + +impl CommonTableExpression { + pub fn to_sql(&self, sql: &mut SQL) { + self.alias.to_sql(sql); + match &self.column_names { + None => {} + Some(names) => { + sql.append_syntax("("); + for name in names { + name.to_sql(sql); + } + sql.append_syntax(")"); + } + } + + sql.append_syntax(" AS ("); + self.select.to_sql(sql); + // the newline is important because a native query might end with a comment + sql.append_syntax("\n)"); + } +} + +impl CTExpr { + pub fn to_sql(&self, sql: &mut SQL) { + match self { + CTExpr::Select(select) => { + select.to_sql(sql); + } + CTExpr::RawSql(raw_vec) => { + for item in raw_vec { + item.to_sql(sql); + } + } // CTExpr::Delete(delete) => delete.to_sql(sql), + // CTExpr::Insert(insert) => insert.to_sql(sql), + // CTExpr::Update(update) => update.to_sql(sql), + } + } +} + +impl RawSql { + pub fn to_sql(&self, sql: &mut SQL) { + match self { + RawSql::RawText(text) => sql.append_syntax(text), + RawSql::Expression(exp) => exp.to_sql(sql), + } + } +} + +impl Explain<'_> { + pub fn to_sql(&self, sql: &mut SQL) { + sql.append_syntax("EXPLAIN "); + match self { + Explain::Select(select) => select.to_sql(sql), + } + } +} + +impl SelectList { + pub fn to_sql(&self, sql: &mut SQL) { + match self { + SelectList::SelectList(select_list) => { + for (index, (_alias, expr)) in select_list.iter().enumerate() { + expr.to_sql(sql); + // sql.append_syntax(" AS "); + // col.to_sql(sql); + if index < (select_list.len() - 1) { + sql.append_syntax(", "); + } + } + } + SelectList::SelectStar => { + sql.append_syntax("*"); + } + SelectList::SelectStarFrom(table_reference) => { + table_reference.to_sql(sql); + sql.append_syntax(".*"); + } + SelectList::Select1 => { + sql.append_syntax("1"); + } + } + } +} + +impl Select { + pub fn to_sql(&self, sql: &mut SQL) { + self.with.to_sql(sql); + + sql.append_syntax("SELECT "); + + self.select_list.to_sql(sql); + + sql.append_syntax(" "); + + match &self.from { + Some(from) => from.to_sql(sql), + None => (), + } + + // for join in &self.joins { + // join.to_sql(sql); + // } + + self.where_.to_sql(sql); + + self.order_by.to_sql(sql); + + // self.limit.to_sql(sql); + } +} + +impl Insert { + pub fn to_sql(&self, sql: &mut SQL) { + sql.append_syntax("INSERT INTO "); + + self.schema.to_sql(sql); + sql.append_syntax("."); + self.table.to_sql(sql); + + if let Some(columns) = &self.columns { + sql.append_syntax("("); + for (index, column_name) in columns.iter().enumerate() { + column_name.to_sql(sql); + if index < (columns.len() - 1) { + sql.append_syntax(", "); + } + } + sql.append_syntax(")"); + } + + sql.append_syntax(" "); + + self.from.to_sql(sql); + + sql.append_syntax(" "); + + self.returning.to_sql(sql); + } +} + +impl InsertFrom { + pub fn to_sql(&self, sql: &mut SQL) { + match self { + InsertFrom::Select(select) => select.to_sql(sql), + InsertFrom::Values(values) => { + sql.append_syntax("VALUES "); + + for (index, object) in values.iter().enumerate() { + sql.append_syntax("("); + for (index, value) in object.iter().enumerate() { + value.to_sql(sql); + if index < (object.len() - 1) { + sql.append_syntax(", "); + } + } + 
sql.append_syntax(")"); + + if index < (values.len() - 1) { + sql.append_syntax(", "); + } + } + } + } + } +} + +impl Delete { + pub fn to_sql(&self, sql: &mut SQL) { + let Delete { + from, + where_, + returning, + } = &self; + + sql.append_syntax("DELETE "); + + from.to_sql(sql); + + sql.append_syntax(" "); + + where_.to_sql(sql); + + sql.append_syntax(" "); + + returning.to_sql(sql); + } +} + +impl Update { + pub fn to_sql(&self, sql: &mut SQL) { + sql.append_syntax("UPDATE "); + + self.schema.to_sql(sql); + sql.append_syntax("."); + self.table.to_sql(sql); + + sql.append_syntax(" SET "); + + // Set values to columns + for (index, (column, expression)) in self.set.iter().enumerate() { + column.to_sql(sql); + sql.append_syntax(" = "); + expression.to_sql(sql); + if index < (self.set.len() - 1) { + sql.append_syntax(", "); + } + } + + sql.append_syntax(" "); + + self.where_.to_sql(sql); + + sql.append_syntax(" "); + + self.returning.to_sql(sql); + } +} + +impl Returning { + pub fn to_sql(&self, sql: &mut SQL) { + sql.append_syntax("RETURNING "); + self.0.to_sql(sql); + } +} + +impl From { + pub fn to_sql(&self, sql: &mut SQL) { + sql.append_syntax("FROM "); + match &self { + From::Table { + reference, + alias: _, + } => { + reference.to_sql(sql); + // sql.append_syntax(" AS "); + // alias.to_sql(sql); + } + From::Select { select, alias: _ } => { + sql.append_syntax("("); + select.to_sql(sql); + sql.append_syntax(")"); + // sql.append_syntax(" AS "); + // alias.to_sql(sql); + } + From::JsonbToRecordset { + expression, + alias, + columns, + } => { + sql.append_syntax("jsonb_to_recordset"); + sql.append_syntax("("); + expression.to_sql(sql); + sql.append_syntax(")"); + sql.append_syntax(" AS "); + alias.to_sql(sql); + sql.append_syntax("("); + + for (index, (column, scalar_type)) in columns.iter().enumerate() { + column.to_sql(sql); + sql.append_syntax(" "); + scalar_type.to_sql(sql); + if index < (columns.len() - 1) { + sql.append_syntax(", "); + } + } + sql.append_syntax(")"); + } + From::JsonbArrayElements { + expression, + alias, + column, + } => { + sql.append_syntax("jsonb_array_elements"); + sql.append_syntax("("); + expression.to_sql(sql); + sql.append_syntax(")"); + sql.append_syntax(" AS "); + alias.to_sql(sql); + sql.append_syntax("("); + column.to_sql(sql); + sql.append_syntax(")"); + } + From::Unnest { + expression, + alias, + column, + } => { + sql.append_syntax("UNNEST"); + sql.append_syntax("("); + expression.to_sql(sql); + sql.append_syntax(")"); + sql.append_syntax(" AS "); + alias.to_sql(sql); + sql.append_syntax("("); + column.to_sql(sql); + sql.append_syntax(")"); + } + } + } +} + +// todo(PY): correct the join syntax for each join type +impl Join { + pub fn to_sql(&self, sql: &mut SQL) { + match self { + Join::LeftOuterJoin(LeftOuterJoin { select, alias, on }) => { + sql.append_syntax(" LEFT OUTER JOIN "); + sql.append_syntax("("); + select.to_sql(sql); + sql.append_syntax(")"); + sql.append_syntax(" AS "); + alias.to_sql(sql); + sql.append_syntax(" ON ("); + on.to_sql(sql); + sql.append_syntax(") "); + } + Join::InnerJoin(InnerJoin { select, alias }) => { + sql.append_syntax(" INNER JOIN "); + sql.append_syntax("("); + select.to_sql(sql); + sql.append_syntax(")"); + sql.append_syntax(" AS "); + alias.to_sql(sql); + sql.append_syntax(" ON ('true') "); + } + Join::CrossJoin(CrossJoin { select, alias }) => { + sql.append_syntax(" CROSS JOIN "); + sql.append_syntax("("); + select.to_sql(sql); + sql.append_syntax(")"); + sql.append_syntax(" AS "); + alias.to_sql(sql); + } + 
Join::FullOuterJoin(join) => { + sql.append_syntax(" FULL OUTER JOIN "); + sql.append_syntax("("); + join.select.to_sql(sql); + sql.append_syntax(")"); + sql.append_syntax(" AS "); + join.alias.to_sql(sql); + sql.append_syntax(" ON ('true') "); + } + } + } +} + +impl Where { + pub fn to_sql(&self, sql: &mut SQL) { + let Where(expression) = self; + if *expression != helpers::true_expr() { + sql.append_syntax(" WHERE "); + expression.to_sql(sql); + } + } +} + +// scalars +impl Expression { + pub fn to_sql(&self, sql: &mut SQL) { + match &self { + Expression::ColumnReference(column_reference) => column_reference.to_sql(sql), + Expression::TableReference(table_reference) => table_reference.to_sql(sql), + Expression::Value(value) => value.to_sql(sql), + Expression::Cast { + expression, + r#type: _, + } => { + // There is no cast expression in DynamoDB + expression.to_sql(sql); + } + Expression::And { left, right } => { + sql.append_syntax("("); + left.to_sql(sql); + sql.append_syntax(" AND "); + right.to_sql(sql); + sql.append_syntax(")"); + } + Expression::Or { left, right } => { + sql.append_syntax("("); + left.to_sql(sql); + sql.append_syntax(" OR "); + right.to_sql(sql); + sql.append_syntax(")"); + } + Expression::Not(expr) => { + sql.append_syntax("NOT "); + expr.to_sql(sql); + } + Expression::BinaryOperation { + left, + operator, + right, + } => { + sql.append_syntax("("); + left.to_sql(sql); + operator.to_sql(sql); + right.to_sql(sql); + sql.append_syntax(")"); + } + Expression::BinaryArrayOperation { + left, + operator, + right, + } => { + sql.append_syntax("("); + { + left.to_sql(sql); + operator.to_sql(sql); + sql.append_syntax("("); + for (index, item) in right.iter().enumerate() { + item.to_sql(sql); + if index < (right.len() - 1) { + sql.append_syntax(", "); + } + } + sql.append_syntax(")"); + } + sql.append_syntax(")"); + } + Expression::UnaryOperation { + expression, + operator, + } => { + sql.append_syntax("("); + expression.to_sql(sql); + operator.to_sql(sql); + sql.append_syntax(")"); + } + Expression::FunctionCall { function, args } => { + function.to_sql(sql); + sql.append_syntax("("); + for (index, arg) in args.iter().enumerate() { + arg.to_sql(sql); + if index < (args.len() - 1) { + sql.append_syntax(", "); + } + } + sql.append_syntax(")"); + } + Expression::JoinExpressions(expressions) => { + for (index, expression) in expressions.iter().enumerate() { + expression.to_sql(sql); + if index < (expressions.len() - 1) { + sql.append_syntax(""); + } + } + } + Expression::SafeOffSet { offset } => { + sql.append_syntax(format!("[SAFE_OFFSET({offset})]").as_str()); + } + Expression::Exists { select } => { + sql.append_syntax("EXISTS "); + sql.append_syntax("("); + select.to_sql(sql); + sql.append_syntax(")"); + } + Expression::JsonBuildObject(map) => { + sql.append_syntax("JSON_OBJECT"); + sql.append_syntax("("); + + for (index, (label, item)) in map.iter().enumerate() { + sql.append_syntax("'"); + sql.append_syntax(label); + sql.append_syntax("'"); + sql.append_syntax(", "); + item.to_sql(sql); + + if index < (map.len() - 1) { + sql.append_syntax(", "); + } + } + + sql.append_syntax(")"); + } + Expression::RowToJson(select) => { + sql.append_syntax("row_to_json"); + sql.append_syntax("("); + select.to_sql(sql); + sql.append_syntax(")"); + } + Expression::Count(count_type) => { + sql.append_syntax("COUNT"); + sql.append_syntax("("); + count_type.to_sql(sql); + sql.append_syntax(")"); + } + Expression::ArrayConstructor(elements) => { + sql.append_syntax("ARRAY["); + for (index, 
element) in elements.iter().enumerate() { + element.to_sql(sql); + + if index < (elements.len() - 1) { + sql.append_syntax(", "); + } + } + sql.append_syntax("]"); + } + Expression::CorrelatedSubSelect(select) => { + sql.append_syntax("("); + select.to_sql(sql); + sql.append_syntax(")"); + } + Expression::NestedFieldSelect { + expression, + nested_field, + } => { + sql.append_syntax("("); + expression.to_sql(sql); + sql.append_syntax(")"); + sql.append_syntax("."); + nested_field.to_sql(sql); + } // Expression::JsonQuery(target, path) => { + // sql.append_syntax("JSON_QUERY"); + // sql.append_syntax("("); + // target.to_sql(sql); + // sql.append_syntax(", "); + // path.to_sql(sql); + // sql.append_syntax(")") + // } + // Expression::JsonValue(target, path) => { + // sql.append_syntax("JSON_VALUE"); + // sql.append_syntax("("); + // target.to_sql(sql); + // sql.append_syntax(", "); + // path.to_sql(sql); + // sql.append_syntax(")") + // } + } + } +} + +// impl JsonPath { +// pub fn to_sql(&self, sql: &mut SQL) { +// sql.append_syntax("'$"); +// for ColumnAlias { +// name: path_item, .. +// } in self.elements.iter() +// { +// sql.append_syntax("."); +// sql.append_syntax(path_item); +// } +// sql.append_syntax("'"); +// } +// } + +impl UnaryOperator { + pub fn to_sql(&self, sql: &mut SQL) { + match self { + UnaryOperator::IsNull => sql.append_syntax(" IS NULL "), + } + } +} + +impl BinaryOperator { + pub fn to_sql(&self, sql: &mut SQL) { + sql.append_syntax(" "); + sql.append_syntax(&self.0); + sql.append_syntax(" "); + } +} + +impl BinaryArrayOperator { + pub fn to_sql(&self, sql: &mut SQL) { + match self { + BinaryArrayOperator::In => sql.append_syntax(" IN "), + } + } +} + +impl NestedField { + pub fn to_sql(&self, sql: &mut SQL) { + sql.append_identifier(&self.0); + } +} + +impl Function { + pub fn to_sql(&self, sql: &mut SQL) { + match self { + Function::Coalesce => sql.append_syntax("coalesce"), + Function::JsonAgg => sql.append_syntax("json_agg"), + Function::JsonbPopulateRecord => sql.append_syntax("jsonb_populate_record"), + Function::ArrayAgg => sql.append_syntax("ARRAY_AGG"), + Function::Unnest => sql.append_syntax("unnest"), + Function::Unknown(name) => sql.append_syntax(name), + Function::SafeOffSet(index) => { + sql.append_syntax(format!("[SAFE_OFFSET({index})]").as_str()); + } + } + } +} + +impl CountType { + pub fn to_sql(&self, sql: &mut SQL) { + match self { + CountType::Star => sql.append_syntax("*"), + CountType::Simple(column) => column.to_sql(sql), + CountType::Distinct(column) => { + sql.append_syntax("DISTINCT "); + column.to_sql(sql); + } + } + } +} + +impl Value { + pub fn to_sql(&self, sql: &mut SQL) { + match &self { + Value::EmptyJsonArray => sql.append_syntax("'[]'"), + Value::Int8(i) => sql.append_syntax(format!("{i}").as_str()), + Value::Float8(n) => sql.append_syntax(format!("{n}").as_str()), + Value::Character(s) | Value::String(s) => sql.append_syntax(format!("'{s}'").as_str()), + // Value::Character(s) | Value::String(s) => sql.append_param(Param::String(s.clone())), + Value::Variable(v) => sql.append_param(Param::Variable(v.clone())), + Value::Bool(true) => sql.append_syntax("true"), + Value::Bool(false) => sql.append_syntax("false"), + Value::Null => sql.append_syntax("null"), + Value::JsonValue(v) => sql.append_param(Param::Value(v.clone())), + Value::Array(items) => { + sql.append_syntax("ARRAY ["); + for (index, item) in items.iter().enumerate() { + item.to_sql(sql); + if index < (items.len() - 1) { + sql.append_syntax(", "); + } + } + 
sql.append_syntax("]"); + } + } + } +} + +impl MutationValueExpression { + pub fn to_sql(&self, sql: &mut SQL) { + match &self { + MutationValueExpression::Expression(expression) => expression.to_sql(sql), + MutationValueExpression::Default => sql.append_syntax("DEFAULT"), + } + } +} + +impl ScalarType { + pub fn to_sql(&self, sql: &mut SQL) { + match &self { + ScalarType::BaseType(scalar_type_name) => { + scalar_type_name.to_sql(sql); + } + ScalarType::ArrayType(scalar_type_name) => { + scalar_type_name.to_sql(sql); + sql.append_syntax("[]"); + } + }; + } +} + +impl ScalarTypeName { + pub fn to_sql(&self, sql: &mut SQL) { + match &self { + ScalarTypeName::Qualified { + schema_name, + type_name, + } => { + schema_name.to_sql(sql); + sql.append_syntax("."); + sql.append_identifier(type_name); + } + ScalarTypeName::Unqualified(type_name) => { + sql.append_identifier(type_name); + } + }; + } +} + +impl Limit { + pub fn to_sql(&self, sql: &mut SQL) { + match self.limit { + None => (), + Some(limit) => { + sql.append_syntax(" LIMIT "); + sql.append_syntax(format!("{limit}").as_str()); + } + }; + match self.offset { + None => (), + Some(offset) => { + sql.append_syntax(" OFFSET "); + sql.append_syntax(format!("{offset}").as_str()); + } + }; + } +} + +// names +impl TableReference { + pub fn to_sql(&self, sql: &mut SQL) { + match self { + TableReference::DBTable { table, gsi } => { + sql.append_identifier(&table.0); + match gsi { + None => (), + Some(gsi) => { + sql.append_syntax("."); + sql.append_identifier(&gsi.0); + } + } + } + TableReference::AliasedTable(alias) => alias.to_sql(sql), + }; + } +} + +impl SchemaName { + pub fn to_sql(&self, sql: &mut SQL) { + sql.append_identifier(&self.0); + } +} + +impl TableName { + pub fn to_sql(&self, sql: &mut SQL) { + sql.append_identifier(&self.0); + } +} + +impl TableAlias { + pub fn to_sql(&self, sql: &mut SQL) { + let name = self.to_aliased_string(); + sql.append_identifier(&name); + } + + pub fn to_aliased_string(&self) -> String { + format!("{}_{}", self.name, self.unique_index) + } +} + +impl ColumnReference { + pub fn to_sql(&self, sql: &mut SQL) { + match self { + ColumnReference::TableColumn { table: _, name } => { + // table.to_sql(sql); + // sql.append_syntax("."); + sql.append_identifier(&name.0.to_string()); + } + ColumnReference::AliasedColumn { column, table: _ } => { + // table.to_sql(sql); + // sql.append_syntax("."); + column.to_sql(sql); + } + }; + } +} + +impl ColumnName { + pub fn to_sql(&self, sql: &mut SQL) { + sql.append_identifier(&self.0); + } +} + +impl ColumnAlias { + pub fn to_sql(&self, sql: &mut SQL) { + let name = self.name.to_string(); + sql.append_identifier(&name); + } +} + +impl OrderBy { + pub fn to_sql(&self, sql: &mut SQL) { + if !self.elements.is_empty() { + sql.append_syntax(" ORDER BY "); + for (index, order_by_item) in self.elements.iter().enumerate() { + order_by_item.to_sql(sql); + if index < (self.elements.len() - 1) { + sql.append_syntax(", "); + } + } + } + } +} + +impl OrderByElement { + pub fn to_sql(&self, sql: &mut SQL) { + self.target.to_sql(sql); + self.direction.to_sql(sql); + } +} + +impl OrderByDirection { + pub fn to_sql(&self, sql: &mut SQL) { + match self { + OrderByDirection::Asc => sql.append_syntax(" ASC "), + OrderByDirection::Desc => sql.append_syntax(" DESC "), + } + } +} diff --git a/crates/query-engine/sql/src/sql/execution_plan.rs b/crates/query-engine/sql/src/sql/execution_plan.rs new file mode 100644 index 0000000..7a19a2d --- /dev/null +++ 
b/crates/query-engine/sql/src/sql/execution_plan.rs
@@ -0,0 +1,81 @@
+//! Describe the SQL execution plan.
+
+use crate::sql;
+use ndc_models as models;
+
+use std::collections::BTreeMap;
+
+#[derive(Debug)]
+/// Definition of an execution plan to be run against the database.
+pub struct ExecutionPlan {
+    /// Run before the query. Should be a sql::ast in the future.
+    pub pre: Vec<sql::string::Statement>,
+    /// The query.
+    pub query: Query,
+    /// Run after the query. Should be a sql::ast in the future.
+    pub post: Vec<sql::string::Statement>,
+}
+
+/// The query we want to run with some additional information.
+#[derive(Debug)]
+pub struct Query {
+    /// The root field name of the top-most collection.
+    pub root_field: models::CollectionName,
+    /// foreach variables.
+    pub variables: Option<Vec<BTreeMap<models::VariableName, serde_json::Value>>>,
+    /// The query.
+    pub query: sql::ast::Select,
+    /// Query limit
+    pub limit: Option<u32>,
+}
+
+impl Query {
+    /// Extract the query component as SQL.
+    pub fn query_sql(&self) -> sql::string::SQL {
+        select_to_sql(&self.query)
+    }
+    pub fn explain_query_sql(&self) -> sql::string::SQL {
+        explain_to_sql(&sql::ast::Explain::Select(&self.query))
+    }
+}
+
+// impl ExecutionPlan {
+//     /// Extract the query component as SQL.
+//     pub fn query(&self) -> sql::string::SQL {
+//         select_to_sql(&self.query)
+//     }
+//     pub fn explain_query(&self) -> sql::string::SQL {
+//         explain_to_sql(&sql::ast::Explain::Select(&self.query))
+//     }
+// }
+
+pub fn select_to_sql(select: &sql::ast::Select) -> sql::string::SQL {
+    let mut sql = sql::string::SQL::new();
+    select.to_sql(&mut sql);
+    sql
+}
+
+pub fn explain_to_sql(explain: &sql::ast::Explain) -> sql::string::SQL {
+    let mut sql = sql::string::SQL::new();
+    explain.to_sql(&mut sql);
+    sql
+}
+
+/// A simple query execution plan with only a root field and a query.
+pub fn simple_query_execution_plan(
+    variables: Option<Vec<BTreeMap<models::VariableName, serde_json::Value>>>,
+    root_field: models::CollectionName,
+    query: sql::ast::Select,
+    limit: Option<u32>,
+) -> ExecutionPlan {
+    ExecutionPlan {
+        pre: vec![],
+        query: Query {
+            root_field,
+            variables,
+            query,
+            limit,
+        },
+        post: vec![],
+    }
+}
diff --git a/crates/query-engine/sql/src/sql/helpers.rs b/crates/query-engine/sql/src/sql/helpers.rs
new file mode 100644
index 0000000..f0a6e17
--- /dev/null
+++ b/crates/query-engine/sql/src/sql/helpers.rs
@@ -0,0 +1,243 @@
+//! Helpers for building sql::ast types in certain shapes and patterns.
+
+use super::ast::*;
+
+/// Used as input to helpers to construct SELECTs which return 'rows' and/or 'aggregates' results.
+#[derive(Debug, Clone, PartialEq)]
+pub enum SelectSet {
+    Rows(Select),
+}
+
+// Empty clauses //
+
+/// An empty `WITH` clause.
+pub fn empty_with() -> With {
+    With {
+        common_table_expressions: vec![],
+    }
+}
+
+/// Add a `WITH` clause to a select.
+pub fn wrap_with(with: With, mut select: Select) -> Select {
+    select.with = with;
+    select
+}
+
+/// An empty `WHERE` clause.
+pub fn empty_where() -> Expression {
+    Expression::Value(Value::Bool(true))
+}
+
+/// An empty `GROUP BY` clause.
+pub fn empty_group_by() -> GroupBy {
+    GroupBy {}
+}
+
+/// An empty `ORDER BY` clause.
+pub fn empty_order_by() -> OrderBy {
+    OrderBy { elements: vec![] }
+}
+
+/// Empty `LIMIT` and `OFFSET` clauses.
+pub fn empty_limit() -> Limit {
+    Limit {
+        limit: None,
+        offset: None,
+    }
+}
+
+/// A `true` expression.
+pub fn true_expr() -> Expression {
+    Expression::Value(Value::Bool(true))
+}
+
+/// A `false` expression.
+pub fn false_expr() -> Expression {
+    Expression::Value(Value::Bool(false))
+}
+
+// Aliasing //
+
+/// Generate a column expression referring to a specific table.
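+///
+/// For example (a sketch; the table alias and column names are illustrative):
+///
+/// ```rust,ignore
+/// let (alias, expr) = make_column(
+///     TableReference::AliasedTable(TableAlias { unique_index: 0, name: "t".to_string() }),
+///     ColumnName("AlbumTitle".to_string()),
+///     make_column_alias("AlbumTitle".to_string()),
+/// );
+/// ```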
+pub fn make_column(
+    table: TableReference,
+    name: ColumnName,
+    alias: ColumnAlias,
+) -> (ColumnAlias, Expression) {
+    (
+        alias,
+        Expression::ColumnReference(ColumnReference::TableColumn { table, name }),
+    )
+}
+
+/// Create column aliases using this function so we build everything in one place.
+pub fn make_column_alias(name: String) -> ColumnAlias {
+    ColumnAlias { name }
+}
+
+// SELECTs //
+
+/// Build a simple select with a select list and the rest are empty.
+pub fn simple_select(select_list: Vec<(ColumnAlias, Expression)>) -> Select {
+    Select {
+        with: empty_with(),
+        select_list: SelectList::SelectList(select_list),
+        from: None,
+        // joins: vec![],
+        where_: Where(empty_where()),
+        // group_by: empty_group_by(),
+        order_by: empty_order_by(),
+        // limit: empty_limit(),
+    }
+}
+
+/// Build a simple select *
+pub fn star_select(from: From) -> Select {
+    Select {
+        with: empty_with(),
+        select_list: SelectList::SelectStar,
+        from: Some(from),
+        // joins: vec![],
+        where_: Where(empty_where()),
+        // group_by: empty_group_by(),
+        order_by: empty_order_by(),
+        // limit: empty_limit(),
+    }
+}
+
+/// Build a simple select `<table>.*`
+pub fn star_from_select(table: TableReference, from: From) -> Select {
+    Select {
+        with: empty_with(),
+        select_list: SelectList::SelectStarFrom(table),
+        from: Some(from),
+        where_: Where(empty_where()),
+        order_by: empty_order_by(),
+    }
+}
+
+/// Generate an EXISTS where expression.
+pub fn where_exists_select(from: From, where_: Where) -> Expression {
+    Expression::Exists {
+        select: Box::new(Select {
+            with: empty_with(),
+            select_list: SelectList::Select1,
+            from: Some(from),
+            where_,
+            order_by: empty_order_by(),
+        }),
+    }
+}
+
+/// Do we want to aggregate results or return a single row?
+#[derive(Clone, Copy)]
+pub enum ResultsKind {
+    AggregateResults,
+    ObjectResults,
+}
+
+/// Given a set of rows, a set of aggregate queries and a variables from clause & table reference,
+/// combine them into one Select.
+pub fn select_rowset(select_set: SelectSet, returns_field: &ReturnsFields) -> Select {
+    match select_set {
+        SelectSet::Rows(row_select) => match returns_field {
+            ReturnsFields::FieldsWereRequested => row_select,
+            ReturnsFields::NoFieldsWereRequested => {
+                todo!("not supported yet")
+            }
+        },
+    }
+}
+
+/// An unqualified scalar type representing int4.
+pub fn int4_type() -> ScalarType {
+    ScalarType::BaseType(ScalarTypeName::Unqualified("int4".to_string()))
+}
+
+/// Turn all rows of a query result into a single json array of objects.
+///
+/// Wrap a query that returns multiple rows in the following format:
+///
+/// ```sql
+/// SELECT
+///   coalesce(json_agg(row_to_json(<table_alias>)), '[]') AS <column_alias>
+/// FROM <select> AS <table_alias>
+/// ```
+///
+/// - `row_to_json` takes a row and converts it to a json object.
+/// - `json_agg` aggregates the json objects to a json array.
+/// - `coalesce(<expr>, <default>)` returns `<expr>` if it is not null, and `<default>` if it is null.
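+///
+/// A hypothetical call site (a sketch; `row_select` is the inner per-row query
+/// and the aliases are whatever the translation step generated):
+///
+/// ```rust,ignore
+/// let wrapped = select_rows_as_json(
+///     row_select,
+///     make_column_alias("rows".to_string()),
+///     TableAlias { unique_index: 1, name: "rows_json".to_string() },
+/// );
+/// ```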
+pub fn select_rows_as_json(
+    row_select: Select,
+    column_alias: ColumnAlias,
+    table_alias: TableAlias,
+) -> Select {
+    let expression = Expression::FunctionCall {
+        function: Function::Coalesce,
+        args: vec![
+            Expression::FunctionCall {
+                function: Function::JsonAgg,
+                args: vec![Expression::RowToJson(TableReference::AliasedTable(
+                    table_alias.clone(),
+                ))],
+            },
+            Expression::Value(Value::EmptyJsonArray),
+        ],
+    };
+    let mut select = simple_select(vec![(column_alias, expression)]);
+    select.from = Some(From::Select {
+        select: Box::new(row_select),
+        alias: table_alias,
+    });
+    select
+}
+
+/// SQL field name to be used for keeping the values of variable sets.
+pub const VARIABLES_FIELD: &str = "%variables";
+
+/// This name will be used as a placeholder for a postgres parameter to which the
+/// user variables sets will be passed.
+pub const VARIABLES_OBJECT_PLACEHOLDER: &str = "%VARIABLES_OBJECT_PLACEHOLDER";
+
+/// SQL field name to be used for ordering results with multiple variable sets.
+pub const VARIABLE_ORDER_FIELD: &str = "%variable_order";
+
+/// An unqualified scalar type representing jsonb.
+pub fn jsonb_type() -> ScalarType {
+    ScalarType::BaseType(ScalarTypeName::Unqualified("jsonb".to_string()))
+}
+
+/// An unqualified scalar type name representing text.
+pub fn text_type_name() -> ScalarTypeName {
+    ScalarTypeName::Unqualified("text".to_string())
+}
+
+/// Wrap a query that returns a single row in the following:
+///
+/// ```sql
+/// SELECT
+///   coalesce(row_to_json(<table_alias>), '{}'::json) AS <column_alias>
+/// FROM <select> AS <table_alias>
+/// ```
+///
+/// - `row_to_json` takes a row and converts it to a json object.
+/// - `coalesce(<expr>, <default>)` returns `<expr>` if it is not null, and `<default>` if it is null.
+///
+pub fn select_row_as_json_with_default(
+    select: Select,
+    column_alias: ColumnAlias,
+    table_alias: TableAlias,
+) -> Select {
+    let expression = Expression::FunctionCall {
+        function: Function::Coalesce,
+        args: vec![
+            Expression::RowToJson(TableReference::AliasedTable(table_alias.clone())),
+            Expression::Value(Value::EmptyJsonArray),
+        ],
+    };
+    let mut final_select = simple_select(vec![(column_alias, expression)]);
+    final_select.from = Some(From::Select {
+        select: Box::new(select),
+        alias: table_alias,
+    });
+    final_select
+}
diff --git a/crates/query-engine/sql/src/sql/mod.rs b/crates/query-engine/sql/src/sql/mod.rs
new file mode 100644
index 0000000..3a6a93f
--- /dev/null
+++ b/crates/query-engine/sql/src/sql/mod.rs
@@ -0,0 +1,8 @@
+//! Anything related to building and emitting SQL.
+
+pub mod ast;
+pub mod convert;
+pub mod execution_plan;
+pub mod helpers;
+pub mod rewrites;
+pub mod string;
diff --git a/crates/query-engine/sql/src/sql/rewrites/constant_folding.rs b/crates/query-engine/sql/src/sql/rewrites/constant_folding.rs
new file mode 100644
index 0000000..a113be3
--- /dev/null
+++ b/crates/query-engine/sql/src/sql/rewrites/constant_folding.rs
@@ -0,0 +1,377 @@
+//! Simple constant expressions folding.
+//! We won't work very hard here because we assume PostgreSQL has
+//! similar rewrites.
+use crate::sql::ast::*;
+
+/// Normalize all expressions in select.
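+///
+/// For example, a sketch of folding a redundant WHERE clause
+/// (`helpers` refers to `crate::sql::helpers`):
+///
+/// ```rust,ignore
+/// let mut select = helpers::simple_select(vec![]);
+/// select.where_ = Where(Expression::And {
+///     left: Box::new(helpers::true_expr()),
+///     right: Box::new(helpers::true_expr()),
+/// });
+/// let select = normalize_select(select);
+/// assert_eq!(select.where_, Where(Expression::Value(Value::Bool(true))));
+/// ```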
+pub fn normalize_select(mut select: Select) -> Select { + // with + select.with.common_table_expressions = select + .with + .common_table_expressions + .into_iter() + .map(normalize_cte) + .collect(); + + // select list + select.select_list = normalize_select_list(select.select_list); + + // from + select.from = select.from.map(normalize_from); + + // joins + // select.joins = select.joins.into_iter().map(normalize_join).collect(); + + // where + select.where_ = Where(normalize_expr(select.where_.0)); + + // order by + select.order_by.elements = select + .order_by + .elements + .into_iter() + .map(normalize_order_by_element) + .collect(); + + // return modified select + select +} + +/// Normalize all expressions in a select list. +pub fn normalize_select_list(select_list: SelectList) -> SelectList { + match select_list { + SelectList::SelectStar => SelectList::SelectStar, + SelectList::SelectStarFrom(table) => SelectList::SelectStarFrom(table), + SelectList::Select1 => SelectList::Select1, + SelectList::SelectList(vec) => SelectList::SelectList( + vec.into_iter() + .map(|(alias, expr)| (alias, normalize_expr(expr))) + .collect(), + ), + } +} + +/// Normalize the select in the join. +pub fn normalize_join(join: Join) -> Join { + match join { + Join::LeftOuterJoin(LeftOuterJoin { select, alias, on }) => { + Join::LeftOuterJoin(LeftOuterJoin { + select: Box::new(normalize_select(*select)), + alias, + on, + }) + } + Join::InnerJoin(InnerJoin { select, alias }) => Join::InnerJoin(InnerJoin { + select: Box::new(normalize_select(*select)), + alias, + }), + Join::FullOuterJoin(FullOuterJoin { select, alias }) => { + Join::FullOuterJoin(FullOuterJoin { + select: Box::new(normalize_select(*select)), + alias, + }) + } + // Join::CrossJoinLateral(CrossJoin { select, alias }) => Join::CrossJoinLateral(CrossJoin { + // select: Box::new(normalize_select(*select)), + // alias, + // }), + Join::CrossJoin(CrossJoin { select, alias }) => Join::CrossJoin(CrossJoin { + select: Box::new(normalize_select(*select)), + alias, + }), + } +} + +/// Normalize a from select. +fn normalize_from(from: From) -> From { + match from { + From::Select { alias, select } => From::Select { + alias, + select: Box::new(normalize_select(*select)), + }, + from => from, + } +} + +/// Normalize the expression in an OrderByElement. +pub fn normalize_order_by_element(mut element: OrderByElement) -> OrderByElement { + element.target = normalize_expr(element.target); + element +} + +/// Normalize the expression in a common table expression. +pub fn normalize_cte(mut cte: CommonTableExpression) -> CommonTableExpression { + cte.select = match cte.select { + CTExpr::Select(select) => CTExpr::Select(normalize_select(select)), + CTExpr::RawSql(raw_sqls) => CTExpr::RawSql( + raw_sqls + .into_iter() + .map(|raw_sql| match raw_sql { + RawSql::RawText(string) => RawSql::RawText(string), + RawSql::Expression(expr) => RawSql::Expression(normalize_expr(expr)), + }) + .collect(), + ), + // CTExpr::Delete(delete) => CTExpr::Delete(normalize_delete(delete)), + // CTExpr::Insert(insert) => CTExpr::Insert(normalize_insert(insert)), + // CTExpr::Update(update) => CTExpr::Update(normalize_update(update)), + }; + cte +} + +/// Constant expressions folding. Remove redundant expressions. +/// This is the main work. The other parts are just trying to apply +/// this rewrite to their Expressions. 
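+///
+/// For example (a sketch; `seven_eq_seven` stands for any comparison
+/// expression, as in the tests at the bottom of this file):
+///
+/// ```rust,ignore
+/// let folded = normalize_expr(Expression::And {
+///     left: Box::new(Expression::Value(Value::Bool(true))),
+///     right: Box::new(seven_eq_seven.clone()),
+/// });
+/// assert_eq!(folded, seven_eq_seven);
+/// ```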
+pub fn normalize_expr(expr: Expression) -> Expression {
+    match expr {
+        // 'true' is the unit element for 'And'
+        Expression::And { left, right } => {
+            let oleft = match normalize_expr(*left) {
+                Expression::Value(Value::Bool(true)) => None,
+                e => Some(e),
+            };
+            let oright = match normalize_expr(*right) {
+                Expression::Value(Value::Bool(true)) => None,
+                e => Some(e),
+            };
+            match (oleft, oright) {
+                (Some(left), Some(right)) => Expression::And {
+                    left: Box::new(left),
+                    right: Box::new(right),
+                },
+                (Some(left), None) => left,
+                (None, Some(right)) => right,
+                // both expressions are None (true), so the 'and' of two trues is true.
+                (None, None) => Expression::Value(Value::Bool(true)),
+            }
+        }
+        // 'false' is the unit element for 'Or'
+        Expression::Or { left, right } => {
+            // none means false, some means expression
+            let oleft = match normalize_expr(*left) {
+                Expression::Value(Value::Bool(false)) => None,
+                e => Some(e),
+            };
+            let oright = match normalize_expr(*right) {
+                Expression::Value(Value::Bool(false)) => None,
+                e => Some(e),
+            };
+            match (oleft, oright) {
+                (Some(left), Some(right)) => Expression::Or {
+                    left: Box::new(left),
+                    right: Box::new(right),
+                },
+                (Some(left), None) => left,
+                (None, Some(right)) => right,
+                // both expressions are None (false), so the 'or' of two falses is false.
+                (None, None) => Expression::Value(Value::Bool(false)),
+            }
+        }
+        // fold the expressions in the select.
+        Expression::Exists { select } => Expression::Exists {
+            select: Box::new(normalize_select(*select)),
+        },
+        // reverse not on literal bool.
+        Expression::Not(expr) => match normalize_expr(*expr) {
+            Expression::Value(Value::Bool(false)) => Expression::Value(Value::Bool(true)),
+            Expression::Value(Value::Bool(true)) => Expression::Value(Value::Bool(false)),
+            expr => Expression::Not(Box::new(expr)),
+        },
+        // Apply inner
+        Expression::BinaryOperation {
+            left,
+            operator,
+            right,
+        } => Expression::BinaryOperation {
+            left: Box::new(normalize_expr(*left)),
+            operator,
+            right: Box::new(normalize_expr(*right)),
+        },
+        // Apply inner
+        Expression::BinaryArrayOperation {
+            left,
+            operator,
+            right,
+        } => Expression::BinaryArrayOperation {
+            left: Box::new(normalize_expr(*left)),
+            operator,
+            right: right.into_iter().map(normalize_expr).collect(),
+        },
+        // Apply inner
+        Expression::UnaryOperation {
+            expression,
+            operator,
+        } => Expression::UnaryOperation {
+            expression: Box::new(normalize_expr(*expression)),
+            operator,
+        },
+        // Apply inner
+        Expression::FunctionCall { function, args } => Expression::FunctionCall {
+            function,
+            args: args.into_iter().map(normalize_expr).collect(),
+        },
+        // Apply inner
+        Expression::JsonBuildObject(object) => Expression::JsonBuildObject(
+            object
+                .into_iter()
+                .map(|(key, expr)| (key, normalize_expr(expr)))
+                .collect(),
+        ),
+        // Apply inner
+        Expression::Cast {
+            expression,
+            r#type: scalar_type,
+        } => Expression::Cast {
+            expression: Box::new(normalize_expr(*expression)),
+            r#type: scalar_type,
+        },
+        // Apply inner
+        Expression::ArrayConstructor(array) => {
+            Expression::ArrayConstructor(array.into_iter().map(normalize_expr).collect())
+        }
+        // Apply inner
+        Expression::CorrelatedSubSelect(select) => {
+            Expression::CorrelatedSubSelect(Box::new(normalize_select(*select)))
+        }
+        // Apply inner
+        Expression::NestedFieldSelect {
+            expression,
+            nested_field,
+        } => Expression::NestedFieldSelect {
+            expression: Box::new(normalize_expr(*expression)),
+            nested_field,
+        },
+        // Nothing to do.
+ Expression::RowToJson(_) + | Expression::ColumnReference(_) + | Expression::TableReference(_) + | Expression::Value(_) + | Expression::Count(_) => expr, + Expression::JoinExpressions(expressions) => { + Expression::JoinExpressions(expressions.into_iter().map(normalize_expr).collect()) + } + Expression::SafeOffSet { offset } => Expression::SafeOffSet { offset }, + } +} + +/// Tests +#[cfg(test)] +mod tests { + use super::normalize_expr; + use crate::sql::ast::*; + + fn expr_false() -> Expression { + Expression::Value(Value::Bool(false)) + } + + fn expr_true() -> Expression { + Expression::Value(Value::Bool(true)) + } + + fn expr_seven() -> Expression { + Expression::Value(Value::Int8(7)) + } + + fn expr_and(left: Expression, right: Expression) -> Expression { + Expression::And { + left: Box::new(left), + right: Box::new(right), + } + } + + fn expr_or(left: Expression, right: Expression) -> Expression { + Expression::Or { + left: Box::new(left), + right: Box::new(right), + } + } + + fn expr_not(expr: Expression) -> Expression { + Expression::Not(Box::new(expr)) + } + + fn expr_eq(left: Expression, right: Expression) -> Expression { + Expression::BinaryOperation { + left: Box::new(left), + operator: BinaryOperator("=".to_string()), + right: Box::new(right), + } + } + + #[test] + fn true_and_true_is_true() { + let left_side = expr_true(); + let right_side = expr_true(); + let expr = expr_and(left_side, right_side); + assert_eq!(normalize_expr(expr), expr_true()); + } + + #[test] + fn false_or_false_is_false() { + let left_side = expr_false(); + let right_side = expr_false(); + let expr = expr_or(left_side, right_side); + assert_eq!(normalize_expr(expr), expr_false()); + } + + #[test] + fn true_and_false_is_false() { + let left_side = expr_true(); + let right_side = expr_false(); + let expr = expr_and(left_side, right_side); + assert_eq!(normalize_expr(expr), expr_false()); + } + + #[test] + fn false_or_true_is_true() { + let left_side = expr_false(); + let right_side = expr_true(); + let expr = expr_or(left_side, right_side); + assert_eq!(normalize_expr(expr), expr_true()); + } + + #[test] + fn left_is_true() { + let left_side = expr_true(); + let right_side = expr_eq(expr_seven(), expr_seven()); + let expr = expr_and(left_side, right_side.clone()); + assert_eq!(normalize_expr(expr), right_side); + } + + #[test] + fn right_is_true() { + let left_side = expr_eq(expr_seven(), expr_seven()); + let right_side = expr_true(); + let expr = expr_and(left_side.clone(), right_side); + assert_eq!(normalize_expr(expr), left_side); + } + + #[test] + fn true_removed_in_complex_expr() { + let eq_expr = expr_eq(expr_seven(), expr_seven()); + let left_side = expr_and(expr_true(), eq_expr.clone()); + let right_side = expr_true(); + let expr = expr_and(left_side, right_side); + assert_eq!(normalize_expr(expr), eq_expr); + } + + #[test] + fn eq_expr_is_not_removed() { + let eq_expr = expr_eq(expr_seven(), expr_seven()); + let left_side = expr_seven(); + let right_side = expr_and(eq_expr.clone(), eq_expr); + let expr = expr_and(left_side, right_side); + assert_eq!(normalize_expr(expr.clone()), expr); + } + + #[test] + fn not_false_is_removed() { + let eq_expr = expr_eq(expr_seven(), expr_seven()); + let not_false = expr_not(expr_false()); + let left_side = expr_and(not_false, eq_expr.clone()); + let right_side = expr_true(); + let expr = expr_and(left_side, right_side); + assert_eq!(normalize_expr(expr), eq_expr); + } +} diff --git a/crates/query-engine/sql/src/sql/rewrites/mod.rs 
b/crates/query-engine/sql/src/sql/rewrites/mod.rs
new file mode 100644
index 0000000..20d74da
--- /dev/null
+++ b/crates/query-engine/sql/src/sql/rewrites/mod.rs
@@ -0,0 +1,2 @@
+//! Various rewrites and optimizations over the SQL AST.
+pub mod constant_folding;
diff --git a/crates/query-engine/sql/src/sql/string.rs b/crates/query-engine/sql/src/sql/string.rs
new file mode 100644
index 0000000..b224f89
--- /dev/null
+++ b/crates/query-engine/sql/src/sql/string.rs
@@ -0,0 +1,62 @@
+//! Type definitions of a low-level SQL string representation.
+
+/// A low-level builder for SQL.
+#[derive(Debug, PartialEq, Eq)]
+pub struct SQL {
+    pub sql: String,
+    pub params: Vec<Param>,
+}
+
+impl Default for SQL {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+/// A parameter for a parameterized query.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum Param {
+    /// A literal string
+    String(String),
+    /// A JSON value
+    Value(serde_json::Value),
+    /// A variable name to look up in the `variables` field in a `QueryRequest`.
+    Variable(String),
+}
+
+/// A DDL statement.
+#[derive(Debug)]
+pub struct DDL(pub SQL);
+
+/// A statement.
+#[derive(Debug)]
+pub struct Statement(pub SQL);
+
+impl SQL {
+    pub fn new() -> SQL {
+        SQL {
+            sql: String::new(),
+            params: vec![],
+        }
+    }
+    /// Append regular SQL syntax like a keyword (like `SELECT`), punctuation, etc.
+    pub fn append_syntax(&mut self, sql: &str) {
+        self.sql.push_str(sql);
+    }
+    /// Append a SQL identifier like a column or a table name.
+    /// For now the identifier is appended as-is, without quoting.
+    pub fn append_identifier(&mut self, sql: &String) {
+        // todo: sanitize
+        self.sql.push_str(sql);
+    }
+    /// Append a parameter to a parameterized query. Will be represented as @param1, @param2,
+    /// and so on, in the sql query text, and will be inserted into the `params` vector, so we
+    /// can bind them later when we run the query.
+    pub fn append_param(&mut self, param: Param) {
+        // we want the parameter numbering to start from 1,
+        // so we first push the param and then check the length of the vector.
+        self.params.push(param);
+        self.sql
+            .push_str(format!("@param{}", self.params.len()).as_str());
+    }
+}
diff --git a/crates/query-engine/translation/Cargo.toml b/crates/query-engine/translation/Cargo.toml
new file mode 100644
index 0000000..4929a89
--- /dev/null
+++ b/crates/query-engine/translation/Cargo.toml
@@ -0,0 +1,25 @@
+[package]
+name = "query-engine-translation"
+version.workspace = true
+edition.workspace = true
+
+[dependencies]
+ndc-sdk = { workspace = true }
+ndc-models = { workspace = true }
+
+query-engine-metadata = { path = "../metadata" }
+query-engine-sql = { path = "../sql" }
+ndc-dynamodb-configuration = { path = "../../../crates/configuration" }
+
+indexmap = { workspace = true }
+multimap = { workspace = true }
+ref-cast = { workspace = true }
+serde_json = { workspace = true }
+thiserror = { workspace = true }
+tracing = { workspace = true }
+anyhow = { workspace = true }
+tokio = { workspace = true }
+
+[dev-dependencies]
+insta = { workspace = true, features = ["json"] }
+sqlformat = { workspace = true }
\ No newline at end of file
diff --git a/crates/query-engine/translation/src/lib.rs b/crates/query-engine/translation/src/lib.rs
new file mode 100644
index 0000000..d0488a7
--- /dev/null
+++ b/crates/query-engine/translation/src/lib.rs
@@ -0,0 +1,4 @@
+//! Translation from ndc-spec requests to DynamoDB execution plans.
+//! See `/architecture.md#translation` in the repository for more details.
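+//!
+//! Very roughly, the entry point is `translation::query::translate`
+//! (a sketch; error handling elided):
+//!
+//! ```ignore
+//! use query_engine_translation::translation;
+//!
+//! let plan = translation::query::translate(&metadata, query_request)?;
+//! ```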
+
+pub mod translation;
diff --git a/crates/query-engine/translation/src/translation/error.rs b/crates/query-engine/translation/src/translation/error.rs
new file mode 100644
index 0000000..3899337
--- /dev/null
+++ b/crates/query-engine/translation/src/translation/error.rs
@@ -0,0 +1,251 @@
+//! Errors for translation.
+
+use ndc_models as models;
+use query_engine_metadata::metadata::{database, Type};
+
+/// A type for translation errors.
+#[derive(Debug, Clone, thiserror::Error)]
+pub enum Error {
+    CollectionNotFound(models::CollectionName),
+    InvalidCollectionName(String),
+    ScalarTypeNotFound(models::ScalarTypeName),
+    ProcedureNotFound(models::ProcedureName),
+    ColumnNotFoundInCollection(models::FieldName, models::CollectionName),
+    RelationshipNotFound(models::RelationshipName),
+    ArgumentNotFound(models::ArgumentName),
+    OperatorNotFound {
+        operator_name: models::ComparisonOperatorName,
+        type_name: models::ScalarTypeName,
+    },
+    NonScalarTypeUsedInOperator {
+        r#type: database::Type,
+    },
+    RelationshipArgumentWasOverriden(models::ArgumentName),
+    EmptyPathForOrderByAggregate,
+    MissingAggregateForArrayRelationOrdering,
+    TypeMismatch(serde_json::Value, models::ScalarTypeName),
+    UnexpectedVariable,
+    CapabilityNotSupported(UnsupportedCapabilities),
+    UnableToDeserializeNumberAsF64(serde_json::Number),
+    ColumnIsGenerated(models::FieldName),
+    ColumnIsIdentityAlways(models::FieldName),
+    MissingColumnInMutation {
+        collection: models::CollectionName,
+        column_name: models::FieldName,
+        operation: String,
+    },
+    NotImplementedYet(String),
+    NoProcedureResultFieldsRequested,
+    UnexpectedStructure(String),
+    UnexpectedOperation {
+        column_name: models::FieldName,
+        operation: String,
+        available_operations: Vec<String>,
+    },
+    InternalError(String),
+    NestedArrayTypesNotSupported,
+    NestedArraysNotSupported {
+        field_name: models::FieldName,
+    },
+    NestedFieldNotOfCompositeType {
+        field_name: models::FieldName,
+        actual_type: Type,
+    },
+    NestedFieldNotOfArrayType {
+        field_name: models::FieldName,
+        actual_type: Type,
+    },
+}
+
+/// Capabilities we don't currently support.
+#[derive(Debug, Clone)]
+pub enum UnsupportedCapabilities {
+    FieldArguments,
+}
+
+impl std::fmt::Display for UnsupportedCapabilities {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+        match self {
+            UnsupportedCapabilities::FieldArguments => write!(f, "Field arguments"),
+        }
+    }
+}
+
+/// Display errors.
+impl std::fmt::Display for Error {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+        match self {
+            Error::CollectionNotFound(collection_name) => {
+                write!(f, "Collection '{collection_name}' not found.")
+            }
+            Error::InvalidCollectionName(collection_name) => {
+                write!(f, "Invalid collection name '{collection_name}'.")
+            }
+            Error::ScalarTypeNotFound(scalar_type) => {
+                write!(f, "Scalar Type '{scalar_type}' not found.")
+            }
+            Error::ProcedureNotFound(procedure_name) => {
+                write!(f, "Procedure '{procedure_name}' not found.")
+            }
+            Error::ColumnNotFoundInCollection(column_name, collection_name) => write!(
+                f,
+                "Column '{column_name}' not found in collection '{collection_name}'."
+            ),
+            Error::RelationshipNotFound(relationship_name) => {
+                write!(f, "Relationship '{relationship_name}' not found.")
+            }
+            Error::ArgumentNotFound(argument) => {
+                write!(f, "Argument '{argument}' not found.")
+            }
+            Error::OperatorNotFound {
+                operator_name,
+                type_name,
+            } => {
+                write!(
+                    f,
+                    "Operator '{operator_name}' not found in type {type_name:?}."
+                )
+            }
+            Error::RelationshipArgumentWasOverriden(key) => {
+                write!(f, "The relationship argument '{key}' was defined as part of the relationship, but was overridden.")
+            }
+            Error::EmptyPathForOrderByAggregate => {
+                write!(f, "No path elements supplied for order by aggregate.")
+            }
+            Error::MissingAggregateForArrayRelationOrdering => {
+                write!(
+                    f,
+                    "No aggregation function was supplied for ordering on an array relationship."
+                )
+            }
+            Error::TypeMismatch(value, typ) => {
+                write!(f, "Value '{value:?}' is not of type '{typ:?}'.")
+            }
+            Error::UnexpectedVariable => {
+                write!(
+                    f,
+                    "Unexpected variable in a query request which does not contain variables."
+                )
+            }
+            Error::UnableToDeserializeNumberAsF64(num) => {
+                write!(f, "Unable to deserialize the number '{num}' as f64.")
+            }
+            Error::ColumnIsGenerated(column) => {
+                write!(f, "Unable to insert into the generated column '{column}'.")
+            }
+            Error::ColumnIsIdentityAlways(column) => {
+                write!(f, "Unable to insert into the identity column '{column}'.")
+            }
+            Error::MissingColumnInMutation {
+                column_name,
+                collection: procedure_name,
+                operation,
+            } => {
+                write!(
+                    f,
+                    "Unable to {operation} '{procedure_name}'. Column '{column_name}' is missing."
+                )
+            }
+            Error::CapabilityNotSupported(thing) => {
+                write!(f, "Queries containing {thing} are not supported.")
+            }
+            Error::NotImplementedYet(thing) => {
+                write!(f, "Queries containing {thing} are not supported.")
+            }
+            Error::NoProcedureResultFieldsRequested => write!(
+                f,
+                "Procedure requests must ask for 'affected_rows' or use the 'returning' clause."
+            ),
+            Error::UnexpectedOperation {
+                column_name,
+                operation,
+                available_operations,
+            } => {
+                let mut string = format!(
+                    "Unexpected operation '{operation}' on column {column_name}. Expected one of: "
+                );
+                for (index, op) in available_operations.iter().enumerate() {
+                    string.push_str(op);
+                    if index < available_operations.len() - 1 {
+                        string.push_str(", ");
+                    }
+                }
+                write!(f, "{string}")
+            }
+            Error::UnexpectedStructure(structure) => write!(f, "Unexpected {structure}."),
+            Error::InternalError(thing) => {
+                write!(f, "Internal error: {thing}.")
+            }
+            Error::NonScalarTypeUsedInOperator { r#type } => {
+                write!(f, "Non-scalar-type used in operator: {type:?}")
+            }
+            Error::NestedArrayTypesNotSupported => {
+                write!(f, "Encountered a nested array type.")
+            }
+            Error::NestedArraysNotSupported { field_name } => {
+                write!(f, "Nested field '{field_name}' requested as nested array.")
+            }
+            Error::NestedFieldNotOfCompositeType {
+                field_name,
+                actual_type,
+            } => {
+                write!(
+                    f,
+                    "Nested field '{field_name}' not of composite type. Actual type: {actual_type:?}"
+                )
+            }
+            Error::NestedFieldNotOfArrayType {
+                field_name,
+                actual_type,
+            } => {
+                write!(
+                    f,
+                    "Nested field '{field_name}' not of array type. Actual type: {actual_type:?}"
+                )
+            }
+        }
+    }
+}
+
+/// A type for translation warnings.
+#[derive(Debug, Clone)]
+pub enum Warning {
+    GeneratingMutationSkippedBecauseColumnNotFoundInCollection {
+        mutation_type: String,
+        column: models::FieldName,
+        collection: models::CollectionName,
+        db_constraint_name: String,
+    },
+    GeneratingMutationSkippedBecauseNoColumnsInConstraint {
+        mutation_type: String,
+        db_constraint_name: String,
+        collection: models::CollectionName,
+    },
+}
+
+/// Display warnings.
+impl std::fmt::Display for Warning {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+        match self {
+            Warning::GeneratingMutationSkippedBecauseColumnNotFoundInCollection {
+                mutation_type,
+                column,
+                collection,
+                db_constraint_name,
+            } => write!(
+                f,
+                "Could not generate {mutation_type} procedure for collection '{collection}':
+Column '{column}' is specified in a uniqueness constraint '{db_constraint_name}' but is missing from the collection."
+            ),
+            Warning::GeneratingMutationSkippedBecauseNoColumnsInConstraint {
+                mutation_type,
+                db_constraint_name,
+                collection,
+            } => write!(
+                f,
+                "Could not generate {mutation_type} procedure for collection '{collection}':
+Uniqueness constraint '{db_constraint_name}' has no columns."
+            ),
+        }
+    }
+}
diff --git a/crates/query-engine/translation/src/translation/helpers.rs b/crates/query-engine/translation/src/translation/helpers.rs
new file mode 100644
index 0000000..216740f
--- /dev/null
+++ b/crates/query-engine/translation/src/translation/helpers.rs
@@ -0,0 +1,321 @@
+//! Helpers for processing requests and building SQL.
+
+use ndc_models::{self as models};
+
+use super::error::Error;
+use query_engine_metadata::metadata;
+use query_engine_sql::sql;
+
+#[derive(Debug)]
+/// Static information from the query and metadata.
+pub struct Env<'request> {
+    pub(crate) metadata: &'request metadata::Metadata,
+}
+
+#[derive(Debug)]
+/// Stateful information changed throughout the translation process.
+pub struct State {
+    // native_queries: NativeQueries,
+    global_table_index: TableAliasIndex,
+}
+
+#[derive(Debug)]
+/// Used for generating a unique name for intermediate tables.
+pub struct TableAliasIndex(pub u64);
+
+/// For the root table in the query and for the current table we are processing,
+/// we track their reference in the query (the name we can use to address them,
+/// an alias we generate) and their name in the metadata (so we can look up
+/// information about them, such as which columns are available on that table).
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct RootAndCurrentTables {
+    /// The root (top-most) table in the query.
+    pub root_table: TableNameAndReference,
+    /// The current table we are processing.
+    pub current_table: TableNameAndReference,
+}
+
+/// For a table in the query, we track its reference in the query (the name we
+/// can use to address it, an alias we generate) and its name in the metadata
+/// (so we can look up information such as which columns are available on it).
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct TableNameAndReference {
+    /// Table name for column lookup
+    pub name: models::CollectionName,
+    /// Table alias to query from
+    pub reference: sql::ast::TableReference,
+}
+
+#[derive(Debug)]
+/// Information about columns
+pub struct ColumnInfo {
+    pub name: sql::ast::ColumnName,
+    pub r#type: metadata::Type,
+}
+
+#[derive(Debug)]
+/// Metadata information about a specific collection, i.e. something which can be queried at the
+/// top level.
+pub enum CollectionInfo<'env> {
+    Table {
+        name: &'env models::CollectionName,
+        info: &'env metadata::TableInfo,
+    },
+}
+
+#[derive(Debug)]
+/// Metadata information about any object that can have fields
+pub enum FieldsInfo<'env> {
+    Table {
+        name: &'env models::CollectionName,
+        info: &'env metadata::TableInfo,
+    },
+}
+
+impl<'a> From<&'a CollectionInfo<'a>> for FieldsInfo<'a> {
+    fn from(value: &'a CollectionInfo<'a>) -> Self {
+        match value {
+            CollectionInfo::Table { name, info } => FieldsInfo::Table { name, info },
+        }
+    }
+}
+
+impl<'request> Env<'request> {
+    /// Run a closure with an empty environment.
+    /// This should only be used for tests.
+    ///
+    /// The reason we cannot just construct and return an empty `Env` is that it contains borrowed
+    /// data. Therefore we take a continuation instead which can do what it likes with the `Env`.
+    /// Both the `Env` and its borrowed data can then be dropped when the continuation returns.
+    pub fn with_empty<F, R>(f: F) -> R
+    where
+        F: FnOnce(Env) -> R,
+    {
+        let temp_metadata = metadata::Metadata::empty();
+        let temp_env = Env {
+            metadata: &temp_metadata,
+        };
+        f(temp_env)
+    }
+
+    /// Create a new Env by supplying the metadata and relationships.
+    pub fn new(metadata: &'request metadata::Metadata) -> Self {
+        Env { metadata }
+    }
+
+    /// Lookup a metadata object that may contain fields. Currently this can only be a table.
+    ///
+    /// This is used to translate field selection.
+    pub fn lookup_fields_info(
+        &self,
+        type_name: &'request models::CollectionName,
+    ) -> Result<FieldsInfo<'request>, Error> {
+        // Lookup the fields of the type name among the tables.
+        let info = self
+            .metadata
+            .tables
+            .0
+            .get(type_name)
+            .map(|t| FieldsInfo::Table {
+                name: type_name,
+                info: t,
+            });
+
+        info.ok_or(Error::CollectionNotFound(type_name.as_str().into()))
+    }
+
+    /// Lookup a collection's information in the metadata.
+    pub fn lookup_collection(
+        &self,
+        collection_name: &'request models::CollectionName,
+    ) -> Result<CollectionInfo<'request>, Error> {
+        let table = self
+            .metadata
+            .tables
+            .0
+            .get(collection_name.as_str())
+            .map(|t| CollectionInfo::Table {
+                name: collection_name,
+                info: t,
+            });
+
+        if let Some(table) = table {
+            Ok(table)
+        } else {
+            Err(Error::CollectionNotFound(collection_name.clone()))
+        }
+    }
+
+    /// Look up a binary comparison operator's name and argument type in the metadata.
+    pub fn lookup_comparison_operator(
+        &self,
+        scalar_type: &models::ScalarTypeName,
+        name: &models::ComparisonOperatorName,
+    ) -> Result<&'request metadata::ComparisonOperator, Error> {
+        self.metadata
+            .scalar_types
+            .0
+            .get(scalar_type)
+            .and_then(|t| t.comparison_operators.get(name))
+            .ok_or(Error::OperatorNotFound {
+                operator_name: name.clone(),
+                type_name: scalar_type.clone(),
+            })
+    }
+
+    /// Lookup the type representation of a type.
+    pub fn lookup_type_representation(
+        &self,
+        scalar_type: &models::ScalarTypeName,
+    ) -> Option<&metadata::TypeRepresentation> {
+        self.metadata
+            .scalar_types
+            .0
+            .get(scalar_type)
+            .and_then(|t| t.type_representation.as_ref())
+    }
+
+    /// Lookup a scalar type by its name in the ndc schema.
+    pub(crate) fn lookup_scalar_type(
+        &self,
+        t: &models::ScalarTypeName,
+    ) -> Result<&metadata::ScalarType, Error> {
+        self.metadata
+            .scalar_types
+            .0
+            .get(t)
+            .ok_or(Error::ScalarTypeNotFound(t.clone()))
+    }
+}
+
+impl FieldsInfo<'_> {
+    /// Lookup a column in a collection.
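+    ///
+    /// Returns the column's SQL name and metadata type, or
+    /// `Error::ColumnNotFoundInCollection` if no such column exists.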
+    pub fn lookup_column(&self, column_name: &models::FieldName) -> Result<ColumnInfo, Error> {
+        match self {
+            FieldsInfo::Table { name, info } => info
+                .columns
+                .get(column_name.as_str())
+                .map(|column_info| ColumnInfo {
+                    name: sql::ast::ColumnName(column_info.name.clone()),
+                    r#type: column_info.r#type.clone(),
+                })
+                .ok_or_else(|| {
+                    Error::ColumnNotFoundInCollection(column_name.clone(), (*name).clone())
+                }),
+            // FieldsInfo::NativeQuery { name, info } => info
+            //     .columns
+            //     .get(column_name)
+            //     .map(|column_info| ColumnInfo {
+            //         name: sql::ast::ColumnName(column_info.name.clone()),
+            //         r#type: column_info.r#type.clone(),
+            //     })
+            //     .ok_or_else(|| {
+            //         Error::ColumnNotFoundInCollection(column_name.clone(), name.as_str().into())
+            //     }),
+        }
+    }
+}
+
+impl CollectionInfo<'_> {
+    /// Lookup a column in a collection.
+    pub fn lookup_column(&self, column_name: &models::FieldName) -> Result<ColumnInfo, Error> {
+        FieldsInfo::from(self).lookup_column(column_name)
+    }
+}
+
+impl Default for State {
+    fn default() -> State {
+        State {
+            // native_queries: NativeQueries::new(),
+            global_table_index: TableAliasIndex(0),
+        }
+    }
+}
+
+impl State {
+    /// Build a new state.
+    pub fn new() -> State {
+        State::default()
+    }
+
+    // aliases
+
+    /// Create table aliases using this function so they get a unique index.
+    pub fn make_table_alias(&mut self, name: String) -> sql::ast::TableAlias {
+        self.global_table_index.make_table_alias(name)
+    }
+
+    /// Create a table alias for an order by target part.
+    /// Provide an index and a source table name (to disambiguate the table being queried),
+    /// and get an alias.
+    pub fn make_order_path_part_table_alias(&mut self, table_name: &str) -> sql::ast::TableAlias {
+        self.make_table_alias(format!("ORDER_PART_{table_name}"))
+    }
+
+    /// Create a table alias for an order by column.
+    /// Provide an index and a source table name (to point at the table being ordered),
+    /// and get an alias.
+    pub fn make_order_by_table_alias(&mut self, source_table_name: &str) -> sql::ast::TableAlias {
+        self.make_table_alias(format!("ORDER_FOR_{source_table_name}"))
+    }
+
+    pub fn make_native_query_table_alias(&mut self, name: &str) -> sql::ast::TableAlias {
+        self.make_table_alias(format!("NATIVE_QUERY_{name}"))
+    }
+
+    /// Create a table alias for boolean expressions.
+    /// Provide state for fresh names and a source table name (to point at the table
+    /// being filtered), and get an alias.
+    pub fn make_boolean_expression_table_alias(
+        &mut self,
+        source_table_name: &str,
+    ) -> sql::ast::TableAlias {
+        self.make_table_alias(format!("BOOLEXP_{source_table_name}"))
+    }
+}
+
+impl TableAliasIndex {
+    /// Increment the table index and return the current one.
+    fn next_global_table_index(&mut self) -> TableAliasIndex {
+        let index = self.0;
+        *self = TableAliasIndex(index + 1);
+        TableAliasIndex(index)
+    }
+
+    /// Create table aliases using this function so they get a unique index.
+    pub fn make_table_alias(&mut self, name: String) -> sql::ast::TableAlias {
+        sql::ast::TableAlias {
+            unique_index: self.next_global_table_index().0,
+            name,
+        }
+    }
+}
+
+/// A newtype wrapper around an ndc-spec type which represents accessing a nested field.
+#[derive(Debug, Clone)]
+pub struct FieldPath(pub Vec<models::FieldName>);
+
+impl From<&Option<Vec<models::FieldName>>> for FieldPath {
+    fn from(field_path: &Option<Vec<models::FieldName>>) -> Self {
+        FieldPath(match field_path {
+            // The option has no logical function other than to avoid breaking changes.
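+            // (A missing path is treated the same as an empty one.)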
+            None => vec![],
+            Some(vec) => vec.clone(),
+        })
+    }
+}
+
+/// Fold an expression inside of a chain of field path accessors.
+pub fn wrap_in_field_path(
+    field_path: &FieldPath,
+    expression: sql::ast::Expression,
+) -> sql::ast::Expression {
+    field_path.0.iter().fold(expression, |expression, field| {
+        sql::ast::Expression::NestedFieldSelect {
+            expression: Box::new(expression),
+            nested_field: sql::ast::NestedField(field.clone().into()),
+        }
+    })
+}
diff --git a/crates/query-engine/translation/src/translation/mod.rs b/crates/query-engine/translation/src/translation/mod.rs
new file mode 100644
index 0000000..df594ec
--- /dev/null
+++ b/crates/query-engine/translation/src/translation/mod.rs
@@ -0,0 +1,5 @@
+//! Translate the incoming QueryRequest to an ExecutionPlan (SQL) to be run against the database.
+
+pub mod error;
+pub mod helpers;
+pub mod query;
diff --git a/crates/query-engine/translation/src/translation/query/fields.rs b/crates/query-engine/translation/src/translation/query/fields.rs
new file mode 100644
index 0000000..60d1f5a
--- /dev/null
+++ b/crates/query-engine/translation/src/translation/query/fields.rs
@@ -0,0 +1,186 @@
+//! Handle 'rows' and 'aggregates' translation.
+
+use indexmap::IndexMap;
+
+use ndc_models as models;
+
+use crate::translation::error::Error;
+use crate::translation::error::UnsupportedCapabilities;
+use crate::translation::helpers::FieldsInfo;
+use crate::translation::helpers::{Env, State, TableNameAndReference};
+use query_engine_metadata::metadata::{Type, TypeRepresentation};
+use query_engine_sql::sql;
+
+/// Translate the field-selection of a query to SQL.
+/// Because field selection may be nested, this function is mutually recursive with
+/// 'translate_nested_field'.
+pub(crate) fn translate_fields(
+    env: &Env,
+    _state: &mut State,
+    fields: IndexMap<models::FieldName, models::Field>,
+    current_table: &TableNameAndReference,
+    from: sql::ast::From,
+) -> Result<sql::ast::Select, Error> {
+    // find the table according to the metadata.
+    let fields_info = env.lookup_fields_info(&current_table.name)?;
+
+    let columns: Vec<(sql::ast::ColumnAlias, sql::ast::Expression)> = fields
+        .into_iter()
+        .map(|(alias, field)| match field {
+            models::Field::Column {
+                column,
+                fields: None,
+                arguments,
+            } if arguments.is_empty() => unpack_and_wrap_fields(
+                env,
+                current_table,
+                &column,
+                sql::helpers::make_column_alias(alias.to_string()),
+                &fields_info,
+            ),
+            models::Field::Column {
+                column: _,
+                fields: _,
+                arguments: _,
+            } => Err(Error::CapabilityNotSupported(
+                UnsupportedCapabilities::FieldArguments,
+            )),
+            ndc_models::Field::Relationship { .. } => todo!(),
+        })
+        .collect::<Result<Vec<_>, Error>>()?;
+
+    let mut select = sql::helpers::simple_select(columns);
+
+    select.from = Some(from);
+
+    Ok(select)
+}
+
+#[allow(clippy::too_many_arguments)]
+/// In order to return the expected type representation for each column,
+/// we need to wrap columns in a type representation cast, and unpack composite types
+/// so we can wrap them.
+fn unpack_and_wrap_fields(
+    env: &Env,
+    current_table: &TableNameAndReference,
+    column: &models::FieldName,
+    alias: sql::ast::ColumnAlias,
+    fields_info: &FieldsInfo<'_>,
+) -> Result<(sql::ast::ColumnAlias, sql::ast::Expression), Error> {
+    let column_info = fields_info.lookup_column(column)?;
+
+    // Different kinds of types have different strategies for converting to their
+    // type representation.
+    match column_info.r#type {
+        // Scalar types can just be wrapped in a cast.
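+        // For example (hypothetically), an int8 column whose type representation
+        // is `Int64AsString` comes out wrapped as `CAST(<column> AS text)`.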
+        Type::ScalarType(scalar_type) => {
+            let column_type_representation = env.lookup_type_representation(&scalar_type);
+            let (alias, expression) = sql::helpers::make_column(
+                current_table.reference.clone(),
+                column_info.name.clone(),
+                alias,
+            );
+            Ok((
+                alias,
+                wrap_in_type_representation(expression, column_type_representation),
+            ))
+        }
+        Type::ArrayType(ref type_boxed) => match **type_boxed {
+            Type::ArrayType(_) => Err(Error::NestedArraysNotSupported {
+                field_name: column.clone(),
+            }),
+            Type::ScalarType(ref scalar_type) => {
+                let inner_column_type_representation = env.lookup_type_representation(scalar_type);
+                let (alias, expression) = sql::helpers::make_column(
+                    current_table.reference.clone(),
+                    column_info.name.clone(),
+                    alias,
+                );
+                Ok((
+                    alias,
+                    wrap_array_in_type_representation(expression, inner_column_type_representation),
+                ))
+            }
+        },
+    }
+}
+
+/// Certain type representations require that we provide a different json representation
+/// than what the database will return.
+/// For array columns of those type representations, we wrap the result in a cast.
+fn wrap_array_in_type_representation(
+    expression: sql::ast::Expression,
+    column_type_representation: Option<&TypeRepresentation>,
+) -> sql::ast::Expression {
+    match column_type_representation {
+        None => expression,
+        Some(type_rep) => {
+            if let Some(cast_type) = get_type_representation_cast_type(type_rep) {
+                sql::ast::Expression::Cast {
+                    expression: Box::new(expression),
+                    // make it an array of the cast type
+                    r#type: sql::ast::ScalarType::ArrayType(cast_type),
+                }
+            } else {
+                expression
+            }
+        }
+    }
+}
+
+/// Certain type representations require that we provide a different json representation
+/// than what the database will return.
+/// For columns of those type representations, we wrap the result in a cast.
+fn wrap_in_type_representation(
+    expression: sql::ast::Expression,
+    column_type_representation: Option<&TypeRepresentation>,
+) -> sql::ast::Expression {
+    match column_type_representation {
+        None => expression,
+        Some(type_rep) => {
+            if let Some(cast_type) = get_type_representation_cast_type(type_rep) {
+                sql::ast::Expression::Cast {
+                    expression: Box::new(expression),
+                    r#type: sql::ast::ScalarType::BaseType(cast_type),
+                }
+            } else {
+                expression
+            }
+        }
+    }
+}
+
+/// If a type representation requires a cast, return the scalar type name.
+fn get_type_representation_cast_type(
+    type_representation: &TypeRepresentation,
+) -> Option<sql::ast::ScalarTypeName> {
+    match type_representation {
+        // In these situations, we expect to cast the expression according
+        // to the type representation.
+        TypeRepresentation::Int64AsString | TypeRepresentation::BigDecimalAsString => {
+            Some(sql::helpers::text_type_name())
+        }
+
+        // In these situations the type representation should be the same as
+        // the expression, so we don't cast it.
+        TypeRepresentation::Boolean
+        | TypeRepresentation::String
+        | TypeRepresentation::Float32
+        | TypeRepresentation::Float64
+        | TypeRepresentation::Int16
+        | TypeRepresentation::Int32
+        | TypeRepresentation::Int64
+        | TypeRepresentation::BigDecimal
+        | TypeRepresentation::Timestamp
+        | TypeRepresentation::Timestamptz
+        | TypeRepresentation::Time
+        | TypeRepresentation::Timetz
+        | TypeRepresentation::Date
+        | TypeRepresentation::UUID
+        | TypeRepresentation::Geography
+        | TypeRepresentation::Geometry
+        | TypeRepresentation::Json
+        | TypeRepresentation::Enum(_) => None,
+    }
+}
diff --git a/crates/query-engine/translation/src/translation/query/filtering.rs b/crates/query-engine/translation/src/translation/query/filtering.rs
new file mode 100644
index 0000000..7851653
--- /dev/null
+++ b/crates/query-engine/translation/src/translation/query/filtering.rs
@@ -0,0 +1,651 @@
+//! Handle filtering/where clauses translation.
+
+use ndc_models as models;
+use query_engine_metadata::metadata;
+use query_engine_sql::sql::helpers::where_exists_select;
+
+use super::root;
+use super::values;
+use crate::translation::error::Error;
+use crate::translation::helpers::wrap_in_field_path;
+use crate::translation::helpers::{
+    ColumnInfo, Env, RootAndCurrentTables, State, TableNameAndReference,
+};
+use query_engine_metadata::metadata::database;
+use query_engine_sql::sql;
+use std::collections::VecDeque;
+use std::vec;
+
+/// Translate a boolean expression to a SQL expression.
+pub fn translate_expression(
+    env: &Env,
+    state: &mut State,
+    root_and_current_tables: &RootAndCurrentTables,
+    predicate: &models::Expression,
+) -> Result<sql::ast::Expression, Error> {
+    // Fetch the filter expression and the relevant joins.
+    let (filter_expression, joins) =
+        translate_expression_with_joins(env, state, root_and_current_tables, predicate)?;
+
+    let mut joins = VecDeque::from(joins);
+    let filter = match joins.pop_front() {
+        // When there are no joins, the expression will suffice.
+        None => filter_expression,
+        // When there are joins, wrap in an EXISTS query.
+        Some(first) => where_exists_select(
+            {
+                let (select, alias) = first.get_select_and_alias();
+                sql::ast::From::Select { alias, select }
+            },
+            sql::ast::Where(filter_expression),
+        ),
+    };
+
+    Ok(filter)
+}
+
+/// Translate a boolean expression to a SQL expression and also provide all of the joins necessary
+/// for the execution.
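+///
+/// For example (a sketch): an ndc `And` of two comparisons becomes a nested
+/// `sql::ast::Expression::And`, and any joins required by either side are
+/// collected into the returned `Vec` for the caller to attach.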
+pub fn translate_expression_with_joins(
+    env: &Env,
+    state: &mut State,
+    root_and_current_tables: &RootAndCurrentTables,
+    predicate: &models::Expression,
+) -> Result<(sql::ast::Expression, Vec<sql::ast::Join>), Error> {
+    match predicate {
+        models::Expression::And { expressions } => {
+            let mut acc_joins = vec![];
+            let and_exprs = expressions
+                .iter()
+                .map(|expr| {
+                    translate_expression_with_joins(env, state, root_and_current_tables, expr)
+                })
+                .try_fold(
+                    sql::ast::Expression::Value(sql::ast::Value::Bool(true)),
+                    |acc, expr| {
+                        let (right, right_joins) = expr?;
+                        acc_joins.extend(right_joins);
+                        Ok(sql::ast::Expression::And {
+                            left: Box::new(acc),
+                            right: Box::new(right),
+                        })
+                    },
+                )?;
+            Ok((and_exprs, acc_joins))
+        }
+        models::Expression::Or { expressions } => {
+            let mut acc_joins = vec![];
+            let or_exprs = expressions
+                .iter()
+                .map(|expr| {
+                    translate_expression_with_joins(env, state, root_and_current_tables, expr)
+                })
+                .try_fold(
+                    sql::ast::Expression::Value(sql::ast::Value::Bool(false)),
+                    |acc, expr| {
+                        let (right, right_joins) = expr?;
+                        acc_joins.extend(right_joins);
+                        Ok(sql::ast::Expression::Or {
+                            left: Box::new(acc),
+                            right: Box::new(right),
+                        })
+                    },
+                )?;
+            Ok((or_exprs, acc_joins))
+        }
+        models::Expression::Not { expression } => {
+            let (expr, joins) =
+                translate_expression_with_joins(env, state, root_and_current_tables, expression)?;
+            Ok((sql::ast::Expression::Not(Box::new(expr)), joins))
+        }
+        models::Expression::BinaryComparisonOperator {
+            column,
+            operator,
+            value,
+        } => {
+            let left_typ = get_comparison_target_type(env, root_and_current_tables, column)?;
+            let op = env.lookup_comparison_operator(&left_typ, operator)?;
+            if op.operator_kind == metadata::OperatorKind::In {
+                let mut joins = vec![];
+                let (left, left_joins) =
+                    translate_comparison_target(env, state, root_and_current_tables, column)?;
+                joins.extend(left_joins);
+
+                match value {
+                    models::ComparisonValue::Column { column } => {
+                        let (right, right_joins) = translate_comparison_target(
+                            env,
+                            state,
+                            root_and_current_tables,
+                            column,
+                        )?;
+                        joins.extend(right_joins);
+
+                        let right = vec![make_unnest_subquery(state, right)];
+
+                        Ok((
+                            sql::ast::Expression::BinaryArrayOperation {
+                                left: Box::new(left),
+                                operator: sql::ast::BinaryArrayOperator::In,
+                                right,
+                            },
+                            joins,
+                        ))
+                    }
+                    models::ComparisonValue::Scalar { value: json_value } => match json_value {
+                        serde_json::Value::Array(values) => {
+                            // The expression on the left is definitely not IN an empty list of values
+                            if values.is_empty() {
+                                Ok((sql::helpers::false_expr(), joins))
+                            } else {
+                                let right = values
+                                    .iter()
+                                    .map(|value| {
+                                        let (right, right_joins) = translate_comparison_value(
+                                            env,
+                                            state,
+                                            root_and_current_tables,
+                                            &models::ComparisonValue::Scalar {
+                                                value: value.clone(),
+                                            },
+                                            &database::Type::ScalarType(left_typ.clone()),
+                                        )?;
+                                        joins.extend(right_joins);
+                                        Ok(right)
+                                    })
+                                    .collect::<Result<Vec<_>, Error>>()?;
+
+                                Ok((
+                                    sql::ast::Expression::BinaryArrayOperation {
+                                        left: Box::new(left),
+                                        operator: sql::ast::BinaryArrayOperator::In,
+                                        right,
+                                    },
+                                    joins,
+                                ))
+                            }
+                        }
+                        _ => Err(Error::TypeMismatch(json_value.clone(), left_typ)),
+                    },
+                    models::ComparisonValue::Variable { .. } => {
+                        let array_type = database::Type::ArrayType(Box::new(
+                            database::Type::ScalarType(left_typ),
+                        ));
+                        let (right, right_joins) = translate_comparison_value(
+                            env,
+                            state,
+                            root_and_current_tables,
+                            value,
+                            &array_type,
+                        )?;
+                        joins.extend(right_joins);
+
+                        let right = Box::new(make_unnest_subquery(state, right));
+
+                        Ok((
+                            sql::ast::Expression::BinaryOperation {
+                                left: Box::new(left),
+                                operator: sql::ast::BinaryOperator(op.operator_name.clone()),
+                                right,
+                            },
+                            joins,
+                        ))
+                    }
+                }
+            } else {
+                let mut joins = vec![];
+                let (left, left_joins) =
+                    translate_comparison_target(env, state, root_and_current_tables, column)?;
+                joins.extend(left_joins);
+
+                let (right, right_joins) = translate_comparison_value(
+                    env,
+                    state,
+                    root_and_current_tables,
+                    value,
+                    &database::Type::ScalarType(op.argument_type.clone()),
+                )?;
+                joins.extend(right_joins);
+
+                if op.is_infix {
+                    Ok((
+                        sql::ast::Expression::BinaryOperation {
+                            left: Box::new(left),
+                            operator: sql::ast::BinaryOperator(op.operator_name.clone()),
+                            right: Box::new(right),
+                        },
+                        joins,
+                    ))
+                } else {
+                    Ok((
+                        sql::ast::Expression::FunctionCall {
+                            function: sql::ast::Function::Unknown(op.operator_name.clone()),
+                            args: vec![left, right],
+                        },
+                        joins,
+                    ))
+                }
+            }
+        }
+
+        models::Expression::Exists {
+            in_collection,
+            predicate,
+        } => match predicate {
+            None => Ok((sql::helpers::true_expr(), vec![])),
+            Some(predicate) => Ok((
+                translate_exists_in_collection(
+                    env,
+                    state,
+                    root_and_current_tables,
+                    in_collection.clone(),
+                    predicate,
+                )?,
+                vec![],
+            )),
+        },
+        models::Expression::UnaryComparisonOperator { column, operator } => match operator {
+            models::UnaryComparisonOperator::IsNull => {
+                let (value, joins) =
+                    translate_comparison_target(env, state, root_and_current_tables, column)?;
+
+                Ok((
+                    sql::ast::Expression::UnaryOperation {
+                        expression: Box::new(value),
+                        operator: sql::ast::UnaryOperator::IsNull,
+                    },
+                    joins,
+                ))
+            }
+        },
+    }
+}
+
+// /// Given a vector of PathElements and the table alias for the table the
+// /// expression is over, we return a join in the form of:
+// ///
+// /// > FULL OUTER JOIN LATERAL (
+// /// >   SELECT <final path table>.* FROM (
+// /// >     (
+// /// >       SELECT *
+// /// >       FROM <table of path[0]> AS <fresh alias 0>
+// /// >       WHERE <predicate of path[0]>
+// /// >       AND <join condition of path[0] against the input table>
+// /// >     ) AS <fresh alias 0>
+// /// >     INNER JOIN LATERAL
+// /// >     (
+// /// >       SELECT *
+// /// >       FROM <table of path[1]> AS <fresh alias 1>
+// /// >       WHERE <predicate of path[1]>
+// /// >       AND <join condition of path[1] against path[0]>
+// /// >     ) AS <fresh alias 1>
+// /// >     ...
+// /// >     INNER JOIN LATERAL
+// /// >     (
+// /// >       SELECT *
+// /// >       FROM <table of path[n]> AS <fresh alias n>
+// /// >       WHERE <predicate of path[n]>
+// /// >       AND <join condition of path[n] against path[n-1]>
+// /// >     ) AS <fresh alias n>
+// /// >   ) AS <fresh alias>
+// /// > )
+// ///
+// /// and the aliased table name under which the sought column can be found, i.e.
+// /// the last drawn fresh name. Or, in the case of an empty paths vector, simply
+// /// the alias that was input.
+// fn translate_comparison_pathelements(
+//     env: &Env,
+//     state: &mut State,
+//     root_and_current_tables: &RootAndCurrentTables,
+//     path: &[models::PathElement],
+// ) -> Result<(TableNameAndReference, Vec<sql::ast::Join>), Error> {
+//     let mut joins = vec![];
+//     let RootAndCurrentTables { current_table, .. } = root_and_current_tables;
+
+//     let final_ref = path.iter().try_fold(
+//         current_table.clone(),
+//         |current_table_ref,
+//          models::PathElement {
+//              relationship,
+//              predicate,
+//              arguments,
+//          }| {
+//             // // get the relationship table
+//             // let relationship_name = &relationship;
+//             // let relationship = env.lookup_relationship(relationship_name)?;
+
+//             // new alias for the target table
+//             // let target_table_alias: sql::ast::TableAlias =
+//             //     state.make_boolean_expression_table_alias(relationship.target_collection.as_str());
+
+//             // let arguments = relationships::make_relationship_arguments(
+//             //     relationships::MakeRelationshipArguments {
+//             //         caller_arguments: arguments.clone(),
+//             //         relationship_arguments: relationship.arguments.clone(),
+//             //     },
+//             // )?;
+
+//             // // create a from clause and get a reference of inner query.
+//             // let (table, from_clause) = root::make_from_clause_and_reference(
+//             //     &relationship.target_collection,
+//             //     &arguments,
+//             //     env,
+//             //     state,
+//             //     Some(target_table_alias.clone()),
+//             // )?;
+
+//             // // build a SELECT querying this table with the relevant predicate.
+//             let mut select = sql::helpers::simple_select(vec![]);
+//             // select.from = Some(from_clause);
+
+//             select.select_list = sql::ast::SelectList::SelectStar;
+
+//             let new_root_and_current_tables = RootAndCurrentTables {
+//                 root_table: root_and_current_tables.root_table.clone(),
+//                 current_table: TableNameAndReference {
+//                     reference: table.reference.clone(),
+//                     name: table.name.clone(),
+//                 },
+//             };
+//             // relationship-specific filter
+//             let (rel_cond, rel_joins) = match predicate {
+//                 None => (sql::helpers::true_expr(), vec![]),
+//                 Some(predicate) => translate_expression_with_joins(
+//                     env,
+//                     state,
+//                     &new_root_and_current_tables,
+//                     predicate,
+//                 )?,
+//             };
+
+//             // relationship where clause
+//             let cond = relationships::translate_column_mapping(
+//                 env,
+//                 &current_table_ref,
+//                 &table.reference,
+//                 rel_cond,
+//                 relationship,
+//             )?;
+
+//             select.where_ = sql::ast::Where(cond);
+
+//             select.joins = rel_joins;
+
+//             joins.push(sql::ast::Join::InnerJoin(sql::ast::InnerJoin {
+//                 select: Box::new(select),
+//                 alias: target_table_alias,
+//             }));
+
+//             Ok(new_root_and_current_tables.current_table)
+//         },
+//     )?;
+
+//     let mut joins: VecDeque<_> = joins.into();
+//     match joins.pop_front() {
+//         None => Ok((final_ref, vec![])),
+
+//         // If we are fetching a nested column (we have joins), we wrap them in a select that fetches
+//         // columns from the last table in the chain.
+//         Some(first) => {
+//             let mut outer_select = sql::helpers::simple_select(vec![]);
+//             outer_select.select_list = sql::ast::SelectList::SelectStarFrom(final_ref.reference);
+//             let (select, alias) = first.get_select_and_alias();
+//             outer_select.from = Some(sql::ast::From::Select { select, alias });
+//             outer_select.joins = joins.into();
+
+//             let alias = state.make_boolean_expression_table_alias(final_ref.name.as_str());
+//             let reference = sql::ast::TableReference::AliasedTable(alias.clone());
+
+//             Ok((
+//                 TableNameAndReference {
+//                     reference,
+//                     name: final_ref.name.clone(),
+//                 },
+//                 // create a join from the select.
+//                 // We use a full outer join so even if one of the sides does not contain rows,
+//                 // We can still select values.
+//                 // See a more elaborated explanation: https://github.com/hasura/ndc-postgres/pull/463#discussion_r1601884534
+//                 vec![sql::ast::Join::FullOuterJoin(sql::ast::FullOuterJoin {
+//                     select: Box::new(outer_select),
+//                     alias,
+//                 })],
+//             ))
+//         }
+//     }
+// }
+
+/// translate a comparison target.
+fn translate_comparison_target(
+    env: &Env,
+    _state: &mut State,
+    root_and_current_tables: &RootAndCurrentTables,
+    column: &models::ComparisonTarget,
+) -> Result<(sql::ast::Expression, Vec<sql::ast::Join>), Error> {
+    match column {
+        models::ComparisonTarget::Column {
+            name,
+            path: _,
+            field_path,
+        } => {
+            let RootAndCurrentTables { root_table, .. } = root_and_current_tables;
+
+            // get the unrelated table information from the metadata.
+            let collection_info = env.lookup_collection(&root_table.name)?;
+            let ColumnInfo { name, .. } = collection_info.lookup_column(name)?;
+
+            Ok((
+                wrap_in_field_path(
+                    &field_path.into(),
+                    sql::ast::Expression::ColumnReference(sql::ast::ColumnReference::TableColumn {
+                        table: root_table.reference.clone(),
+                        name,
+                    }),
+                ),
+                vec![],
+            ))
+        }
+
+        // Compare a column from the root table.
+        models::ComparisonTarget::RootCollectionColumn { name, field_path } => {
+            let RootAndCurrentTables { root_table, .. } = root_and_current_tables;
+            // get the unrelated table information from the metadata.
+            let collection_info = env.lookup_collection(&root_table.name)?;
+
+            // find the requested column in the tables columns.
+            let ColumnInfo { name, .. } = collection_info.lookup_column(name)?;
+
+            Ok((
+                wrap_in_field_path(
+                    &field_path.into(),
+                    sql::ast::Expression::ColumnReference(sql::ast::ColumnReference::TableColumn {
+                        table: root_table.reference.clone(),
+                        name,
+                    }),
+                ),
+                vec![],
+            ))
+        }
+    }
+}
+
+/// translate a comparison value.
+fn translate_comparison_value(
+    env: &Env,
+    state: &mut State,
+    root_and_current_tables: &RootAndCurrentTables,
+    value: &models::ComparisonValue,
+    typ: &database::Type,
+) -> Result<(sql::ast::Expression, Vec<sql::ast::Join>), Error> {
+    match value {
+        models::ComparisonValue::Column { column } => {
+            translate_comparison_target(env, state, root_and_current_tables, column)
+        }
+        models::ComparisonValue::Scalar { value: json_value } => Ok((
+            values::translate_json_value(env, state, json_value, typ)?,
+            vec![],
+        )),
+        models::ComparisonValue::Variable { name: _ } => todo!("Variables are not supported"),
+    }
+}
+
+/// Translate an EXISTS clause into a SQL subquery of the following form:
+///
+/// > EXISTS (SELECT 1 as 'one' FROM <table> AS <alias> WHERE <predicate>)
+pub fn translate_exists_in_collection(
+    env: &Env,
+    state: &mut State,
+    root_and_current_tables: &RootAndCurrentTables,
+    in_collection: models::ExistsInCollection,
+    predicate: &models::Expression,
+) -> Result<sql::ast::Expression, Error> {
+    match in_collection {
+        models::ExistsInCollection::Unrelated {
+            collection,
+            arguments: _,
+        } => {
+            // create a from clause and get a reference of inner query.
+            let (table, from_clause) =
+                root::make_from_clause_and_reference(&collection, None, env, state, None)?;
+
+            // CockroachDB doesn't like empty selects, so we do "SELECT 1 as 'one' ..."
+            let column_alias = sql::helpers::make_column_alias("one".to_string());
+
+            let select_cols = vec![(
+                column_alias,
+                sql::ast::Expression::Value(sql::ast::Value::Int8(1)),
+            )];
+
+            // build a SELECT querying this table with the relevant predicate.
+            let mut select = sql::helpers::simple_select(select_cols);
+            select.from = Some(from_clause);
+
+            let new_root_and_current_tables = RootAndCurrentTables {
+                root_table: root_and_current_tables.root_table.clone(),
+                current_table: TableNameAndReference {
+                    reference: table.reference,
+                    name: table.name,
+                },
+            };
+
+            let (expr, _expr_joins) = translate_expression_with_joins(
+                env,
+                state,
+                &new_root_and_current_tables,
+                predicate,
+            )?;
+            select.where_ = sql::ast::Where(expr);
+
+            // select.joins = expr_joins;
+
+            // > EXISTS (SELECT 1 as 'one' FROM <table> AS <alias> WHERE <predicate>)
+            Ok(sql::ast::Expression::Exists {
+                select: Box::new(select),
+            })
+        }
+        // We get a relationship name in exists, query the target table directly,
+        // and build a WHERE clause that contains the join conditions and the specified
+        // EXISTS condition.
+        models::ExistsInCollection::Related {
+            relationship: _,
+            arguments: _,
+        } => todo!("Relationships are not supported yet"),
+        models::ExistsInCollection::NestedCollection {
+            column_name: _,
+            arguments: _,
+            field_path: _,
+        } => todo!("Filter by nested collection is not implemented yet"),
+    }
+}
+
+/// Extract the scalar type of a comparison target
+fn get_comparison_target_type(
+    env: &Env,
+    root_and_current_tables: &RootAndCurrentTables,
+    column: &models::ComparisonTarget,
+) -> Result<models::ScalarTypeName, Error> {
+    match column {
+        models::ComparisonTarget::RootCollectionColumn { name, field_path } => {
+            let column = env
+                .lookup_collection(&root_and_current_tables.root_table.name)?
+                .lookup_column(name)?;
+
+            let mut field_path = match field_path {
+                None => VecDeque::new(),
+                Some(field_path) => field_path.iter().collect(),
+            };
+            get_column_scalar_type_name(&column.r#type, &mut field_path)
+        }
+        models::ComparisonTarget::Column {
+            name,
+            path,
+            field_path,
+        } => {
+            let mut field_path = match field_path {
+                None => VecDeque::new(),
+                Some(field_path) => field_path.iter().collect(),
+            };
+            match path.last() {
+                None => {
+                    let column = env
+                        .lookup_collection(&root_and_current_tables.current_table.name)?
+                        .lookup_column(name)?;
+
+                    get_column_scalar_type_name(&column.r#type, &mut field_path)
+                }
+                Some(_last) => {
+                    todo!("relationship is not supported")
+                }
+            }
+        }
+    }
+}
+
+/// Extract the scalar type name of a column down its nested field path.
+/// Will error if the path does not lead to a scalar type.
+fn get_column_scalar_type_name(
+    typ: &database::Type,
+    field_path: &mut VecDeque<&models::FieldName>,
+) -> Result<models::ScalarTypeName, Error> {
+    let field = field_path.pop_front();
+    match typ {
+        database::Type::ScalarType(scalar_type) => match field {
+            None => Ok(scalar_type.clone()),
+            // todo: what about json?
+            Some(field) => Err(Error::ColumnNotFoundInCollection(
+                field.clone(),
+                scalar_type.as_str().into(),
+            )),
+        },
+        database::Type::ArrayType(_) => Err(Error::NonScalarTypeUsedInOperator {
+            r#type: typ.clone(),
+        }),
+    }
+}
+
+/// Make an unnest subquery expression from an expression.
+fn make_unnest_subquery(
+    state: &mut State,
+    expression: sql::ast::Expression,
+) -> sql::ast::Expression {
+    let subquery_alias = state.make_table_alias("in_subquery".to_string());
+    let subquery_reference = sql::ast::TableReference::AliasedTable(subquery_alias.clone());
+    let subquery_from = sql::ast::From::Unnest {
+        expression,
+        column: sql::helpers::make_column_alias("value".to_string()),
+        alias: subquery_alias,
+    };
+    let mut subquery = sql::helpers::simple_select(vec![sql::helpers::make_column(
+        subquery_reference,
+        sql::ast::ColumnName("value".to_string()),
+        sql::helpers::make_column_alias("value".to_string()),
+    )]);
+    subquery.from = Some(subquery_from);
+    sql::ast::Expression::CorrelatedSubSelect(Box::new(subquery))
+}
diff --git a/crates/query-engine/translation/src/translation/query/mod.rs b/crates/query-engine/translation/src/translation/query/mod.rs
new file mode 100644
index 0000000..fa54cda
--- /dev/null
+++ b/crates/query-engine/translation/src/translation/query/mod.rs
@@ -0,0 +1,62 @@
+//! Translate an incoming `QueryRequest`.
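+//!
+//! Collection names may carry a DynamoDB Global Secondary Index after a colon:
+//! for example (hypothetical names), `"users:users_by_email"` queries the
+//! `users` table through the `users_by_email` GSI, while plain `"users"`
+//! queries the table itself. See `translate` below.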
+
+pub mod fields;
+pub mod filtering;
+pub mod root;
+mod sorting;
+pub mod values;
+
+use ndc_models::{self as models};
+
+use crate::translation::error::Error;
+use crate::translation::helpers::{Env, State};
+use query_engine_metadata::metadata;
+use query_engine_sql::sql;
+
+/// Translate the incoming QueryRequest to an ExecutionPlan (SQL) to be run against the database.
+pub fn translate(
+    metadata: &metadata::Metadata,
+    query_request: models::QueryRequest,
+) -> Result<sql::execution_plan::ExecutionPlan<sql::execution_plan::Query>, Error> {
+    let mut state = State::new();
+    let env = Env::new(metadata);
+
+    let collection_string = query_request.collection.as_str();
+    let collection = if collection_string.contains(':') {
+        let split: Vec<&str> = collection_string.split(':').collect();
+        if split.len() != 2 {
+            return Err(Error::InvalidCollectionName(collection_string.to_string()));
+        };
+        let collection_name = models::CollectionName::new(split[0].into());
+        let gsi_name = split[1];
+        (collection_name, Some(sql::ast::Gsi(gsi_name.to_string())))
+    } else {
+        (models::CollectionName::new(collection_string.into()), None)
+    };
+
+    let (query_limit, returns_field, select_set) = root::translate_query(
+        &env,
+        &mut state,
+        &root::MakeFrom::Collection {
+            name: collection.0.clone(),
+            arguments: query_request.arguments.clone(),
+            gsi: collection.1.clone(),
+        },
+        &None,
+        &query_request.query,
+    )?;
+
+    // form a single JSON item shaped `{ rows: [] }`
+    // that matches the models::RowSet type
+    let json_select = sql::helpers::select_rowset(select_set, &returns_field);
+
+    // normalize ast
+    let json_select = sql::rewrites::constant_folding::normalize_select(json_select);
+
+    Ok(sql::execution_plan::simple_query_execution_plan(
+        query_request.variables,
+        query_request.collection,
+        json_select,
+        query_limit,
+    ))
+}
diff --git a/crates/query-engine/translation/src/translation/query/operators.rs b/crates/query-engine/translation/src/translation/query/operators.rs
new file mode 100644
index 0000000..5dd5e4a
--- /dev/null
+++ b/crates/query-engine/translation/src/translation/query/operators.rs
@@ -0,0 +1,67 @@
+use ndc_sdk::models;
+
+use super::error::Error;
+use query_engine_sql::sql;
+
+pub fn translate_operator(
+    operator: &models::BinaryComparisonOperator,
+) -> Result<sql::ast::BinaryOperator, Error> {
+    match operator {
+        models::BinaryComparisonOperator::Equal => Ok(sql::ast::BinaryOperator::Equals),
+        models::BinaryComparisonOperator::Other { name } => {
+            lookup_by_name(name.as_str()).ok_or_else(|| Error::OperatorNotFound(name.clone()))
+        }
+    }
+}
+
+fn lookup_by_name(name: &str) -> Option<sql::ast::BinaryOperator> {
+    match name {
+        "_eq" => Some(sql::ast::BinaryOperator::Equals),
+        "_neq" => Some(sql::ast::BinaryOperator::NotEquals),
+        "_lt" => Some(sql::ast::BinaryOperator::LessThan),
+        "_lte" => Some(sql::ast::BinaryOperator::LessThanOrEqualTo),
+        "_gt" => Some(sql::ast::BinaryOperator::GreaterThan),
+        "_gte" => Some(sql::ast::BinaryOperator::GreaterThanOrEqualTo),
+        "_like" => Some(sql::ast::BinaryOperator::Like),
+        "_nlike" => Some(sql::ast::BinaryOperator::NotLike),
+        "_ilike" => Some(sql::ast::BinaryOperator::CaseInsensitiveLike),
+        "_nilike" => Some(sql::ast::BinaryOperator::NotCaseInsensitiveLike),
+        "_similar" => Some(sql::ast::BinaryOperator::Similar),
+        "_nsimilar" => Some(sql::ast::BinaryOperator::NotSimilar),
+        "_regex" => Some(sql::ast::BinaryOperator::Regex),
+        "_nregex" => Some(sql::ast::BinaryOperator::NotRegex),
+        "_iregex" => Some(sql::ast::BinaryOperator::CaseInsensitiveRegex),
+        "_niregex" => Some(sql::ast::BinaryOperator::NotCaseInsensitiveRegex),
+        _ => None,
+    }
+}
+
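+// A small usage sketch (not part of the original test suite): `lookup_by_name`
+// maps ndc operator names to SQL AST operators, and returns `None` for names
+// it does not know about.
+#[cfg(test)]
+mod lookup_examples {
+    use super::*;
+
+    #[test]
+    fn known_and_unknown_names() {
+        assert!(lookup_by_name("_eq").is_some());
+        assert!(lookup_by_name("_no_such_operator").is_none());
+    }
+}
+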
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    // This test ties together the metadata and SQL comparison operators at test
+    // time, so we can avoid coupling them at compile time.
+    //
+    // It is not completely clear that this is necessary; this is more of an
+    // extra check. If it starts to become troublesome, we can remove it.
+    //
+    // This needs to live here because this module is private.
+    #[test]
+    fn test_metadata_binary_comparison_operators_are_in_sync_with_sql() {
+        for metadata_operator in
+            enum_iterator::all::()
+        {
+            let name = metadata_operator.name();
+            let sql_operator = lookup_by_name(name)
+                .unwrap_or_else(|| panic!("{}", Error::OperatorNotFound(name.to_string())));
+            // For lack of a better way to ensure we get the correct operator,
+            // we can use their `Debug` representation. We may have to evolve
+            // this if things become more sophisticated.
+            assert_eq!(
+                format!("{:?}", metadata_operator),
+                format!("{:?}", sql_operator)
+            );
+        }
+    }
+}
diff --git a/crates/query-engine/translation/src/translation/query/root.rs b/crates/query-engine/translation/src/translation/query/root.rs
new file mode 100644
index 0000000..0b9b188
--- /dev/null
+++ b/crates/query-engine/translation/src/translation/query/root.rs
@@ -0,0 +1,240 @@
+//! Handle 'rows' and 'aggregates' translation.
+
+use std::collections::BTreeMap;
+
+use indexmap::IndexMap;
+
+use ndc_models as models;
+use query_engine_sql::sql::ast::ReturnsFields;
+
+use super::fields;
+use super::filtering;
+use super::sorting;
+use crate::translation::error::Error;
+use crate::translation::helpers::{
+    CollectionInfo, Env, RootAndCurrentTables, State, TableNameAndReference,
+};
+use query_engine_sql::sql;
+
+/// Translate a query to sql ast.
+/// We return a select set with a SQL query for the two components - the rows and the aggregates.
+pub fn translate_query(
+    env: &Env,
+    state: &mut State,
+    make_from: &MakeFrom,
+    join_predicate: &Option<JoinPredicate<'_, '_>>,
+    query_request: &models::Query,
+) -> Result<(Option<u32>, ReturnsFields, sql::helpers::SelectSet), Error> {
+    // translate rows selection.
+    let (returns_field, row_select) =
+        translate_rows_select(env, state, make_from, join_predicate, query_request)?;
+
+    // // translate aggregate selection.
+    // let aggregate_select =
+    //     translate_aggregate_select(env, state, make_from, join_predicate, query_request)?;
+
+    // Create a structure describing the selection set - only rows, only aggregates, or both.
+    let (_, rows) = (&returns_field, row_select);
+    let select_set = (
+        query_request.limit,
+        returns_field,
+        sql::helpers::SelectSet::Rows(rows),
+    );
+
+    Ok(select_set)
+}
+
+/// Translate the rows part of a query to sql ast.
+fn translate_rows_select(
+    env: &Env,
+    state: &mut State,
+    make_from: &MakeFrom,
+    join_predicate: &Option<JoinPredicate<'_, '_>>,
+    query: &models::Query,
+) -> Result<(ReturnsFields, sql::ast::Select), Error> {
+    let (current_table, from_clause) = make_reference_and_from_clause(env, state, make_from)?;
+
+    // translate fields to select list
+    let fields = query.fields.clone().unwrap_or_default();
+
+    // remember whether fields were requested or not.
+    // The case where fields were not requested, and also no aggregates were requested,
+    // can be used for `__typename` queries.
+    let returns_fields = if IndexMap::is_empty(&fields) {
+        ReturnsFields::NoFieldsWereRequested
+    } else {
+        ReturnsFields::FieldsWereRequested
+    };
+
+    let mut fields_select = match returns_fields {
+        ReturnsFields::FieldsWereRequested => {
+            fields::translate_fields(env, state, fields, &current_table, from_clause)?
+
+/// Translate the common part (the "lion's share") of a 'rows' or 'aggregates' query:
+/// specifically the from, joins, order by, where, limit and offset clauses.
+///
+/// This expects to get the relevant information about tables, relationships, the root table,
+/// and the query, as well as the columns and join fields after processing.
+pub fn translate_query_part(
+    env: &Env,
+    state: &mut State,
+    current_table: &TableNameAndReference,
+    _join_predicate: &Option<JoinPredicate<'_, '_>>,
+    query: &models::Query,
+    select: &mut sql::ast::Select,
+) -> Result<(), Error> {
+    // the root table and the current table are the same at this point
+    let root_and_current_tables = RootAndCurrentTables {
+        root_table: current_table.clone(),
+        current_table: current_table.clone(),
+    };
+
+    // translate order_by
+    let order_by =
+        sorting::translate_order_by(env, state, &root_and_current_tables, &query.order_by)?;
+
+    // select.joins.extend(order_by_joins);
+
+    // translate where
+    let filter = match &query.predicate {
+        None => Ok(sql::helpers::true_expr()),
+        Some(predicate) => {
+            filtering::translate_expression(env, state, &root_and_current_tables, predicate)
+        }
+    }?;
+
+    select.where_ = sql::ast::Where(filter);
+
+    select.order_by = order_by;
+
+    Ok(())
+}
+
+/// Create a from clause from a collection name and its reference.
+pub fn make_from_clause_and_reference(
+    collection_name: &models::CollectionName,
+    gsi_name: Option<String>,
+    env: &Env,
+    state: &mut State,
+    collection_alias: Option<sql::ast::TableAlias>,
+) -> Result<(TableNameAndReference, sql::ast::From), Error> {
+    let collection_alias = match collection_alias {
+        None => state.make_table_alias(collection_name.to_string()),
+        Some(_alias) => todo!("alias not supported"),
+    };
+    // find the table according to the metadata.
+    let collection_info = env.lookup_collection(collection_name)?;
+    let from_clause = make_from_clause(state, &collection_alias, &collection_info, gsi_name);
+
+    let collection_alias_name = sql::ast::TableReference::AliasedTable(collection_alias);
+    let current_table = TableNameAndReference {
+        name: collection_name.clone(),
+        reference: collection_alias_name,
+    };
+    Ok((current_table, from_clause))
+}
+
+/// Build a FROM clause from a collection info and an alias,
+/// optionally targeting a global secondary index of the table.
+fn make_from_clause(
+    _state: &mut State,
+    current_table_alias: &sql::ast::TableAlias,
+    collection_info: &CollectionInfo,
+    gsi_name: Option<String>,
+) -> sql::ast::From {
+    match collection_info {
+        CollectionInfo::Table { info, .. } => {
+            let db_table = sql::ast::TableReference::DBTable {
+                table: sql::ast::TableName(info.table_name.clone()),
+                gsi: gsi_name,
+            };
+            sql::ast::From::Table {
+                reference: db_table,
+                alias: current_table_alias.clone(),
+            }
+        }
+    }
+}
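+
+// Note on the `gsi` field (illustrative, assuming the query is ultimately
+// rendered as DynamoDB PartiQL): PartiQL addresses a global secondary index
+// with a dotted "table"."index" pair, so `gsi: Some("ArtistNameIndex")` on a
+// hypothetical "Artist" table would be expected to render as
+//
+//     SELECT ... FROM "Artist"."ArtistNameIndex"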
+
+/// Join predicate.
+pub struct JoinPredicate<'a, 'b> {
+    /// Join the current table with this table.
+    pub join_with: &'a TableNameAndReference,
+    /// This is the description of the relationship.
+    pub relationship: &'b models::Relationship,
+}
+
+/// Arguments to build a from clause.
+pub enum MakeFrom {
+    /// From a collection (db table, native query).
+    Collection {
+        /// Used for generating aliases.
+        name: models::CollectionName,
+        /// An optional global secondary index to query instead of the table.
+        gsi: Option<String>,
+        /// Native query arguments.
+        arguments: BTreeMap<models::ArgumentName, models::Argument>,
+    },
+    /// From an existing relation.
+    TableReference {
+        /// Used for generating aliases.
+        name: models::CollectionName,
+        /// The reference name to the existing relation.
+        reference: sql::ast::TableReference,
+    },
+}
+
+/// Build a from clause and return the table name and reference.
+fn make_reference_and_from_clause(
+    env: &Env,
+    state: &mut State,
+    make_from: &MakeFrom,
+) -> Result<(TableNameAndReference, sql::ast::From), Error> {
+    match make_from {
+        MakeFrom::Collection {
+            name,
+            gsi,
+            arguments: _,
+        } => make_from_clause_and_reference(name, gsi.clone(), env, state, None),
+        MakeFrom::TableReference { name, reference } => {
+            let table_alias = state.make_table_alias(name.to_string());
+            let from_clause = sql::ast::From::Table {
+                reference: reference.clone(),
+                alias: table_alias.clone(),
+            };
+            let reference = sql::ast::TableReference::AliasedTable(table_alias);
+            Ok((
+                TableNameAndReference {
+                    name: name.clone(),
+                    reference,
+                },
+                from_clause,
+            ))
+        }
+    }
+}
diff --git a/crates/query-engine/translation/src/translation/query/sorting.rs b/crates/query-engine/translation/src/translation/query/sorting.rs
new file mode 100644
index 0000000..29734d6
--- /dev/null
+++ b/crates/query-engine/translation/src/translation/query/sorting.rs
@@ -0,0 +1,421 @@
+//! Translate Order By clauses.
+use multimap::MultiMap;
+use std::collections::hash_map::DefaultHasher;
+use std::hash::{Hash, Hasher};
+
+use ndc_models as models;
+
+use crate::translation::error::Error;
+use crate::translation::helpers::{
+    wrap_in_field_path, CollectionInfo, Env, FieldPath, RootAndCurrentTables, State,
+    TableNameAndReference,
+};
+use query_engine_sql::sql;
+
+// Top-level //
+
+/// Convert the order by fields from a QueryRequest to a SQL ORDER BY clause, and potentially
+/// to JOINs when we order by relationship fields.
+pub fn translate_order_by(
+    env: &Env,
+    state: &mut State,
+    root_and_current_tables: &RootAndCurrentTables,
+    order_by: &Option<models::OrderBy>,
+) -> Result<sql::ast::OrderBy, Error> {
+    // skip if there's no order by clause.
+    match order_by {
+        None => Ok(sql::ast::OrderBy { elements: vec![] }),
+        Some(models::OrderBy { elements }) => {
+            // Group order by elements by their paths, and translate each group
+            // to order by columns (with their indices in the order by list) and
+            // joins that select these columns from the relevant paths.
+            let element_groups = group_elements(elements);
+            let order_by_parts = element_groups
+                .iter()
+                .map(|element_group| {
+                    translate_order_by_target_group(
+                        env,
+                        state,
+                        root_and_current_tables,
+                        element_group,
+                    )
+                })
+                .collect::<Result<Vec<Vec<(usize, sql::ast::OrderByElement)>>, Error>>()?;
+            // flatten the result columns and sort by their indices in the order by list.
+            let mut order_by_columns = order_by_parts.into_iter().flatten().collect::<Vec<_>>();
+            order_by_columns.sort_by_key(|(index, _)| *index);
+
+            // Discard the indices and construct an order by clause (and the accompanying joins).
+            Ok(sql::ast::OrderBy {
+                elements: order_by_columns
+                    .into_iter()
+                    .map(|(_, order_by_element)| order_by_element)
+                    .collect(),
+            })
+        }
+    }
+}
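+
+// Worked example (hypothetical data, for illustration only): given the order
+// by elements
+//
+//     [ Album.Title ASC, Artist.Name DESC, Album.Price ASC ]
+//
+// grouping by path yields two groups while remembering each element's
+// position in the request:
+//
+//     path [Album]:  [(0, Title ASC), (2, Price ASC)]
+//     path [Artist]: [(1, Name DESC)]
+//
+// After translating each group, the elements are re-sorted by index so that
+// the final ORDER BY comes out in the order the request asked for.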
+
+// Types //
+
+/// Group columns or aggregates that share the same path element.
+/// Columns and aggregates need to be separated because they return
+/// different numbers of rows.
+#[derive(Debug)]
+enum OrderByElementGroup<'a> {
+    Columns {
+        path: &'a [models::PathElement],
+        columns: Vec<GroupedOrderByElement<(Column, FieldPath)>>,
+    },
+    Aggregates {
+        path: &'a [models::PathElement],
+        aggregates: Vec<GroupedOrderByElement<Aggregate>>,
+    },
+}
+
+/// A column or aggregate element with its index in the order by list
+/// and its order by direction.
+#[derive(Debug)]
+struct GroupedOrderByElement<T> {
+    index: usize,
+    direction: models::OrderDirection,
+    element: T,
+}
+
+/// A column to select from a table used in an order by.
+#[derive(Debug)]
+struct Column(models::FieldName);
+
+/// An aggregate operation to select from a table used in an order by.
+#[derive(Debug)]
+enum Aggregate {
+    CountStarAggregate,
+    SingleColumnAggregate { column: models::FieldName },
+}
+
+impl OrderByElementGroup<'_> {
+    /// Extract the path component of a group.
+    fn path(&self) -> &[models::PathElement] {
+        match &self {
+            Self::Columns { path, .. } | Self::Aggregates { path, .. } => path,
+        }
+    }
+}
+
+// Group elements //
+
+/// Group order by elements that share the same path. Separate columns and aggregates
+/// because they each return different numbers of rows.
+fn group_elements(elements: &[models::OrderByElement]) -> Vec<OrderByElementGroup> {
+    // We need to jump through some hoops to group path elements because serde_json::Value
+    // does not have Ord or Hash instances. So we use a u64 key derived from hashing the
+    // string representation of a path.
+    let hash_path = |path: &[models::PathElement]| {
+        let mut s = DefaultHasher::new();
+        format!("{path:?}").hash(&mut s);
+        s.finish()
+    };
+
+    let mut column_element_groups: MultiMap<
+        u64, // path hash
+        (
+            usize,                  // index
+            &[models::PathElement], // path
+            FieldPath,              // field path
+            models::OrderDirection, // order by direction
+            Column,                 // column
+        ),
+    > = MultiMap::new();
+
+    let mut aggregate_element_groups: MultiMap<
+        u64, // path hash
+        (
+            usize,                  // index
+            &[models::PathElement], // path
+            models::OrderDirection, // order by direction
+            Aggregate,              // aggregate
+        ),
+    > = MultiMap::new();
+
+    // for each element, insert it into its respective group according to its kind and path.
+    for (i, element) in elements.iter().enumerate() {
+        match &element.target {
+            models::OrderByTarget::Column {
+                path,
+                name,
+                field_path,
+            } => column_element_groups.insert(
+                hash_path(path),
+                (
+                    i,
+                    path,
+                    field_path.into(),
+                    element.order_direction,
+                    Column(name.clone()),
+                ),
+            ),
+            models::OrderByTarget::StarCountAggregate { path } => aggregate_element_groups.insert(
+                hash_path(path),
+                (
+                    i,
+                    path,
+                    element.order_direction,
+                    Aggregate::CountStarAggregate,
+                ),
+            ),
+            models::OrderByTarget::SingleColumnAggregate {
+                path,
+                column,
+                function: _,
+                field_path: _,
+            } => aggregate_element_groups.insert(
+                hash_path(path),
+                (
+                    i,
+                    path,
+                    element.order_direction,
+                    Aggregate::SingleColumnAggregate {
+                        column: column.clone(),
+                    },
+                ),
+            ),
+        }
+    }
+
+    // Ignore the hash that was only used to group the paths and construct an OrderByElementGroup.
+    let mut element_vecs = vec![];
+    for (_, vec) in column_element_groups {
+        element_vecs.push(OrderByElementGroup::Columns {
+            // if it's here, there's at least one.
+            path: vec.first().unwrap().1,
+            columns: vec
+                .into_iter()
+                .map(
+                    |(index, _, field_path, direction, element)| GroupedOrderByElement {
+                        index,
+                        direction,
+                        element: (element, field_path),
+                    },
+                )
+                .collect::<Vec<_>>(),
+        });
+    }
+    // Ignore the hash that was only used to group the paths and construct an OrderByElementGroup.
+    for (_, vec) in aggregate_element_groups {
+        element_vecs.push(OrderByElementGroup::Aggregates {
+            // if it's here, there's at least one.
+            path: vec.first().unwrap().1,
+            aggregates: vec
+                .into_iter()
+                .map(|(index, _, direction, element)| GroupedOrderByElement {
+                    index,
+                    direction,
+                    element,
+                })
+                .collect::<Vec<_>>(),
+        });
+    }
+
+    element_vecs
+}
+
+// Translate a group //
+
+/// Translate an order by group, adding any additional JOINs to the wrapping SELECT,
+/// and return the order by elements that reference the expressions used for the sort,
+/// together with their positions in the order by list.
+fn translate_order_by_target_group(
+    env: &Env,
+    state: &mut State,
+    root_and_current_tables: &RootAndCurrentTables,
+    element_group: &OrderByElementGroup,
+) -> Result<Vec<(usize, sql::ast::OrderByElement)>, Error> {
+    let column_or_relationship_select = build_select_and_joins_for_order_by_group(
+        env,
+        state,
+        root_and_current_tables,
+        element_group,
+    )?;
+
+    match column_or_relationship_select {
+        // The columns are from the source table; we just need to reference them directly.
+        ColumnsOrSelect::Columns(columns) => Ok(columns
+            .into_iter()
+            .map(|(i, direction, field_path, column_name)| {
+                (
+                    i,
+                    sql::ast::OrderByElement {
+                        target: wrap_in_field_path(
+                            &field_path,
+                            sql::ast::Expression::ColumnReference(column_name),
+                        ),
+                        direction: match direction {
+                            models::OrderDirection::Asc => sql::ast::OrderByDirection::Asc,
+                            models::OrderDirection::Desc => sql::ast::OrderByDirection::Desc,
+                        },
+                    },
+                )
+            })
+            .collect()),
+    }
+}
+
+/// Used as the return type of `build_select_and_joins_for_order_by_group`.
+/// Represents either direct references to the requested columns (if the path is empty),
+/// or a select query describing how to reach the columns.
+enum ColumnsOrSelect {
+    /// Columns represents target columns that are referenced from the current table.
+    Columns(
+        Vec<(
+            usize,                     // The global order by index for this column.
+            models::OrderDirection,    // The order direction.
+            FieldPath,                 // The nested field path.
+            sql::ast::ColumnReference, // A reference for this column.
+        )>,
+    ),
+}
+
+/// Generate a SELECT query representing querying the requested columns/aggregates from a table
+/// (potentially a nested one, using joins).
+fn build_select_and_joins_for_order_by_group(
+    env: &Env,
+    _state: &mut State,
+    root_and_current_tables: &RootAndCurrentTables,
+    element_group: &OrderByElementGroup,
+) -> Result<ColumnsOrSelect, Error> {
+    // We want to build a select query where "Track" is the root table, and "Artist"."Name"
+    // is the column we need for the order by. Our query will look like this:
+    //
+    // > ( SELECT "Artist"."Name" AS "Name" -- wanted column, might be wrapped with a function if one is supplied
+    // >   FROM
+    // >     ( SELECT "Album"."ArtistId" ---- required for the next join condition
+    // >       FROM "Album" AS "Album"
+    // >       WHERE "Track"."AlbumId" = "Album"."AlbumId" --- requires 'AlbumId' from 'Track'
+    // >     ) AS "Album"
+    // >   LEFT OUTER JOIN LATERAL
+    // >     ( SELECT "Artist"."Name" AS "Name" ---- the wanted column for the order by
+    // >       FROM "Artist" AS "Artist" ---- the last relationship table
+    // >       WHERE ("Album"."ArtistId" = "Artist"."ArtistId") ---- requires 'ArtistId' from 'Album'
+    // >     ) AS "Artist" ON ('true')
+    // > )
+    //
+    // Note that "Track" will be supplied by the caller of this function.
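+
+    // This join-based plan is carried over from the SQL translation this code
+    // appears to be based on; in this connector only the empty-path case below
+    // is currently implemented, and relationship paths hit a `todo!`.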
+
+    let path = element_group.path();
+
+    if path.is_empty() {
+        match element_group {
+            OrderByElementGroup::Aggregates { .. } => {
+                // Cannot do an aggregation over an empty path. Must be a relationship.
+                Err(Error::EmptyPathForOrderByAggregate)
+            }
+            OrderByElementGroup::Columns { .. } => {
+                // If the path is empty, we don't need to build a query; just return the columns.
+                let table = env.lookup_collection(&root_and_current_tables.current_table.name)?;
+                let columns = translate_targets(
+                    &table,
+                    &root_and_current_tables.current_table,
+                    element_group,
+                )?
+                .into_iter()
+                .map(|column| {
+                    (
+                        column.index,
+                        column.direction,
+                        column.field_path,
+                        sql::ast::ColumnReference::AliasedColumn {
+                            table: root_and_current_tables.current_table.reference.clone(),
+                            column: column.alias,
+                        },
+                    )
+                })
+                .collect();
+                Ok(ColumnsOrSelect::Columns(columns))
+            }
+        }
+    }
+    // If we query a relationship, build a wrapping select query selecting the requested columns/aggregates
+    // for the order by, and build a select of all the joins to select from.
+    else {
+        todo!("relationship order by");
+    }
+}
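+
+// Illustration of the empty-path case above (hypothetical "Artist" table):
+// the columns are referenced straight off the current table, so ordering by
+// `Name` descending simply becomes
+//
+//     ORDER BY "Artist"."Name" DESC
+//
+// with no extra joins. Non-empty (relationship) paths are not translated yet,
+// as the `todo!` above makes explicit.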
+
+/// An expression selected from the target order by table.
+struct OrderBySelectExpression {
+    index: usize,
+    direction: models::OrderDirection,
+    field_path: FieldPath,
+    alias: sql::ast::ColumnAlias,
+}
+
+/// Take an element group and convert all of the elements we want to select
+/// into aliases and expressions, along with their order by direction and their
+/// index in the order by list.
+fn translate_targets(
+    target_collection: &CollectionInfo,
+    _table: &TableNameAndReference,
+    element_group: &OrderByElementGroup,
+) -> Result<Vec<OrderBySelectExpression>, Error> {
+    match element_group {
+        OrderByElementGroup::Columns { columns, path: _ } => {
+            let columns = columns
+                .iter()
+                .map(|element| {
+                    let (Column(target_column_name), field_path) = &element.element;
+                    let selected_column = target_collection.lookup_column(target_column_name)?;
+                    // we deliberately use the table column name and not an alias we get from
+                    // the query request, because this is internal to the sorting mechanism.
+                    let selected_column_alias =
+                        sql::helpers::make_column_alias(selected_column.name.0);
+
+                    // we use the real name of the column as an alias as well.
+                    Ok::<OrderBySelectExpression, Error>(OrderBySelectExpression {
+                        index: element.index,
+                        direction: element.direction,
+                        alias: selected_column_alias.clone(),
+                        field_path: field_path.clone(),
+                    })
+                })
+                .collect::<Result<Vec<_>, Error>>()?;
+
+            Ok(columns)
+        }
+        OrderByElementGroup::Aggregates { aggregates, .. } => {
+            let aggregates = aggregates
+                .iter()
+                .map(|element| {
+                    match &element.element {
+                        Aggregate::CountStarAggregate => {
+                            let column_alias = sql::helpers::make_column_alias("count".to_string());
+                            Ok(OrderBySelectExpression {
+                                index: element.index,
+                                direction: element.direction,
+                                alias: column_alias,
+                                // Aggregates do not have a field path.
+                                field_path: (&None).into(),
+                            })
+                        }
+                        Aggregate::SingleColumnAggregate { column } => {
+                            let selected_column = target_collection.lookup_column(column)?;
+                            // we deliberately use the table column name and not an alias we get from
+                            // the query request, because this is internal to the sorting mechanism.
+                            let selected_column_alias =
+                                sql::helpers::make_column_alias(selected_column.name.0);
+                            // we use the real name of the column as an alias as well.
+                            Ok(OrderBySelectExpression {
+                                index: element.index,
+                                direction: element.direction,
+                                alias: selected_column_alias.clone(),
+                                // Aggregates do not have a field path.
+                                field_path: (&None).into(),
+                            })
+                        }
+                    }
+                })
+                .collect::<Result<Vec<_>, Error>>()?;
+
+            Ok(aggregates)
+        }
+    }
+}
diff --git a/crates/query-engine/translation/src/translation/query/values.rs b/crates/query-engine/translation/src/translation/query/values.rs
new file mode 100644
index 0000000..2510668
--- /dev/null
+++ b/crates/query-engine/translation/src/translation/query/values.rs
@@ -0,0 +1,179 @@
+//! Handle the translation of literal values.
+
+use crate::translation::{error::Error, helpers::Env, helpers::State};
+use ndc_models as models;
+use query_engine_metadata::metadata::database;
+use query_engine_sql::sql;
+use query_engine_sql::sql::ast::{ColumnReference, Expression, Value};
+use query_engine_sql::sql::helpers::simple_select;
+
+/// Convert a JSON value into a SQL value.
+pub fn translate_json_value(
+    env: &Env,
+    state: &mut State,
+    value: &serde_json::Value,
+    r#type: &database::Type,
+) -> Result<Expression, Error> {
+    match (value, r#type) {
+        (serde_json::Value::Null, _) => Ok(Expression::Cast {
+            expression: Box::new(Expression::Value(Value::Null)),
+            r#type: type_to_ast_scalar_type(env, r#type)?,
+        }),
+        (serde_json::Value::Bool(b), _) => Ok(Expression::Value(Value::Bool(*b))),
+        (serde_json::Value::Number(n), _) => {
+            let lit = n
+                .as_f64()
+                .ok_or(Error::UnableToDeserializeNumberAsF64(n.clone()))?;
+            Ok(Expression::Value(Value::Float8(lit)))
+        }
+        (serde_json::Value::String(str), _) => Ok(Expression::Value(Value::String(str.clone()))),
+        (serde_json::Value::Array(_), database::Type::ArrayType(_)) => {
+            let value_expression =
+                sql::ast::Expression::Value(sql::ast::Value::JsonValue(value.clone()));
+            translate_projected_variable(env, state, r#type, value_expression)
+        }
+
+        // If the type is not congruent with the value constructor, we simply pass the json
+        // value through raw and cast it to the specified type. This allows users to consume
+        // any json values, treating them either as actual json or as any type that has a cast
+        // from json defined.
+        _ => Ok(sql::ast::Expression::Cast {
+            expression: Box::new(sql::ast::Expression::Cast {
+                expression: Box::new(Expression::Value(Value::JsonValue(value.clone()))),
+                r#type: sql::helpers::jsonb_type(),
+            }),
+            r#type: type_to_ast_scalar_type(env, r#type)?,
+        }),
+    }
+}
+
+/// Translate an NDC 'Type' to a SQL scalar type.
+fn type_to_ast_scalar_type(env: &Env, typ: &database::Type) -> Result<sql::ast::ScalarType, Error> {
+    match typ {
+        query_engine_metadata::metadata::Type::ArrayType(t) => {
+            let scalar_type_name = type_to_ast_scalar_type_name(env, t)?;
+            Ok(sql::ast::ScalarType::ArrayType(scalar_type_name))
+        }
+        _ => Ok(sql::ast::ScalarType::BaseType(
+            type_to_ast_scalar_type_name(env, typ)?,
+        )),
+    }
+}
+
+/// Translate an NDC 'Type' to a SQL type name.
+fn type_to_ast_scalar_type_name(
+    env: &Env,
+    typ: &database::Type,
+) -> Result<sql::ast::ScalarTypeName, Error> {
+    match typ {
+        query_engine_metadata::metadata::Type::ArrayType(_) => {
+            Err(Error::NestedArrayTypesNotSupported)
+        }
+        query_engine_metadata::metadata::Type::ScalarType(t) => {
+            let scalar_type: &query_engine_metadata::metadata::ScalarType =
+                env.lookup_scalar_type(t)?;
+            Ok(sql::ast::ScalarTypeName::Unqualified(
+                scalar_type.type_name.to_string(),
+            ))
+        }
+    }
+}
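+
+// Illustration of the fallback double cast in `translate_json_value` above
+// (assumed rendering; the "address" type is hypothetical): a JSON object
+// literal, which matches none of the earlier arms, translates to roughly
+//
+//     CAST(CAST('{"city": "Oslo"}' AS jsonb) AS address)
+//
+// so any type with a cast from jsonb can consume a raw JSON value.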
+
+/// Convert a variable into a SQL value.
+pub fn translate_variable(
+    env: &Env,
+    state: &mut State,
+    variables_table: sql::ast::TableReference,
+    variable: &models::VariableName,
+    r#type: &database::Type,
+) -> Result<Expression, Error> {
+    let variables_reference = Expression::ColumnReference(ColumnReference::AliasedColumn {
+        table: variables_table,
+        column: sql::helpers::make_column_alias(sql::helpers::VARIABLES_FIELD.to_string()),
+    });
+
+    // We use the binary operator '->' to project (as jsonb) the value of a variable from the
+    // data column of the variables table.
+    let projected_variable_exp = sql::ast::Expression::BinaryOperation {
+        left: Box::new(variables_reference),
+        operator: sql::ast::BinaryOperator("->".to_string()),
+        right: Box::new(sql::ast::Expression::Value(sql::ast::Value::String(
+            variable.to_string(),
+        ))),
+    };
+
+    translate_projected_variable(env, state, r#type, projected_variable_exp)
+}
+
+/// Produce a SQL expression that translates an expression of Postgres type 'jsonb' into a given
+/// type.
+///
+/// For scalar types and object types this is a simple operation, since we can rely on builtin
+/// functions.
+///
+/// Arrays are more complex, since there isn't a builtin function that handles array
+/// types.
+pub fn translate_projected_variable(
+    env: &Env,
+    state: &mut State,
+    r#type: &database::Type,
+    exp: sql::ast::Expression,
+) -> Result<Expression, Error> {
+    let result = match r#type {
+        // We translate projection of array types into the following sql:
+        // ```
+        // ( SELECT
+        //     array_agg(
+        //       jsonb_populate_record(cast(null as <type>), "array"."element")
+        //     ) AS "element"
+        //   FROM
+        //     jsonb_array_elements(( ->